10416 server catalog v1 support desired
authorShawn Walker <srw@sun.com>
Fri, 23 Oct 2009 17:43:37 -0500
changeset 1431 62b6033670e4
parent 1430 b7ce3a0a17eb
child 1432 e2a449338bc5
10416 server catalog v1 support desired 243 need localized descriptions, other metadata at catalog level 2424 Need to use UTC consistently everywhere 3092 messaging api/framework needed for pkg clients (cli, gui, etc.) 7063 "pkg list -a -s" needs performance improvement 7163 manifests are modified by client before being written to disk 8217 package fmri should be added to manifest during publishing 9061 importer should not refresh indexes 9446 traceback for cfg_cache operations if read-only filesystem 10415 client catalog v1 support desired 11094 Client transport for catalog v1 11523 only permit FMRIs from same publisher for network repositories 11831 server api version incompatible templates can cause traceback 11832 depot needs ability to seed / alter repository configuration 11954 importer shows zero packages processed unless debug enabled 12006 merge utility should have a test suite
doc/catalog-v1.txt
doc/server_api_versions.txt
src/client.py
src/depot.py
src/man/pkg.depotd.1m.txt
src/man/pkgsend.1.txt
src/modules/actions/attribute.py
src/modules/catalog.py
src/modules/client/__init__.py
src/modules/client/api.py
src/modules/client/api_errors.py
src/modules/client/bootenv.py
src/modules/client/image.py
src/modules/client/imageconfig.py
src/modules/client/imageplan.py
src/modules/client/pkgplan.py
src/modules/client/progress.py
src/modules/client/publisher.py
src/modules/client/transport/engine.py
src/modules/client/transport/exception.py
src/modules/client/transport/repo.py
src/modules/client/transport/transport.py
src/modules/depotcontroller.py
src/modules/manifest.py
src/modules/misc.py
src/modules/publish/dependencies.py
src/modules/publish/transaction.py
src/modules/server/api.py
src/modules/server/catalog.py
src/modules/server/config.py
src/modules/server/depot.py
src/modules/server/errors.py
src/modules/server/face.py
src/modules/server/feed.py
src/modules/server/repository.py
src/modules/server/repositoryconfig.py
src/modules/server/transaction.py
src/modules/updatelog.py
src/pkgdefs/SUNWipkg/prototype
src/publish.py
src/pull.py
src/tests/api/t_catalog.py
src/tests/api/t_client.py
src/tests/api/t_repositoryconfig.py
src/tests/baseline.txt
src/tests/cli/t_api.py
src/tests/cli/t_api_search.py
src/tests/cli/t_pkg_api_install.py
src/tests/cli/t_pkg_depotd.py
src/tests/cli/t_pkg_history.py
src/tests/cli/t_pkg_image_create.py
src/tests/cli/t_pkg_image_update.py
src/tests/cli/t_pkg_install.py
src/tests/cli/t_pkg_intent.py
src/tests/cli/t_pkg_list.py
src/tests/cli/t_pkg_publisher.py
src/tests/cli/t_pkg_refresh.py
src/tests/cli/t_pkg_search.py
src/tests/cli/t_pkgrecv.py
src/tests/cli/t_pkgsend.py
src/tests/cli/t_publish_api.py
src/tests/cli/t_util_merge.py
src/tests/cli/testutils.py
src/tests/gui/t_pm_rmrepo.py
src/util/distro-import/importer.py
src/util/publish/merge.py
src/web/en/base.shtml
src/web/en/catalog.shtml
src/web/en/search.shtml
src/web/index.shtml
src/web/shared.shtml
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/catalog-v1.txt	Fri Oct 23 17:43:37 2009 -0500
@@ -0,0 +1,1177 @@
+pkg(5): image packaging system
+
+CATALOG FORMAT AND CACHING PROPOSAL
+
+1.  Overview
+
+    The pkg(5) server and client catalogs currently provide a summary
+    view of the packages provided by a repository: the FMRIs of each
+    package, the last time the set of available packages changed, and
+    the total number of packages.  The server uses this information
+    for publication checks, to fulfill client requests, for search
+    indexing and analysis, and to enable browser-based access to the
+    repository via the BUI (Browser User Interface).  pkg(5) clients
+    use this information to determine what packages are available, to
+    validate user input, and to fulfill packaging operation requests.
+
+1.1  History
+
+    As development of the Image Packaging System has progressed, both
+    the server and client have increasingly required access to more
+    packaged metadata as fixes and various improvements have been
+    implemented.  This has resulted in increased demand on server and
+    client system resources when analyzing package metadata, and
+    increased processing times as well.
+
+    To address catalog performance issues, a client-side unified
+    catalog cache was implemented, and initially contained all known
+    package stems from the set of publishers defined within the image
+    configuration.  The caching mechanism was then replaced, using a
+    Python dict structure designed for efficient lookups of package
+    information by stem or FMRI and providing an ordered list of
+    versions, that was then serialized to disk.
+
+    Recently, the caching was revised to use a custom, delta-encoded
+    text format that avoided object serialization as that created an
+    implicit dependency on object versions and definitions, as well as
+    significant overhead in the on-disk footprint.  To improve package
+    metadata performance, a new cache format was created that factored
+    package manifests by the types of actions contained within, and
+    then stored each type of action in a separate file for each
+    manifest.
+
+1.2  Challenges
+
+    Despite past improvements, significant performance improvements
+    are still needed for both the server and client when processing
+    and analyzing package metadata.  The work done so far has also
+    only benefited the client, leaving server performance behind.
+    Specifically, the underlying catalog data, caching mechanisms,
+    and catalog retrieval operations suffer from the following
+    issues:
+
+    - the catalog format used for the server and client is not
+      consistent and the server uses local time instead of UTC
+
+    - the client does not maintain a 1:1 copy of the server's catalog
+      and attributes making it difficult to verify its integrity and
+      complicates the logic needed to obtain updates
+
+    - the caching mechanisms implemented are not granular enough,
+      causing some operations to run slower than necessary as more
+      information than is needed is loaded and processed
+
+    - no efficient lookup mechanism exists for some of the metadata,
+      causing operations such as dependency calculation to require a
+      linear scan and retrieval of manifests
+
+    - the existing caching mechanisms require clients to retrieve
+      manifests for all known packages to be able to perform summary
+      listings of available packages (at least 65 MiB for a new build
+      of OpenSolaris) -- which is especially harmful to GUI clients
+      such as packagemanager(1)
+
+    - the existing caching mechanisms do not provide the information
+      needed to determine (ahead of time) what package manifests need
+      to be retrieved during packaging operations, which leaves pkg(5)
+      clients unable to provide sufficient feedback to the user during
+      plan creation such as number of bytes to be transferred, time
+      estimates, etc.
+
+    - the catalog operation and caching mechanisms offered by the
+      depot server are not extensible, and cannot accommodate new
+      metadata that may be needed to perform client operations
+      without a client and server revision
+
+    - the catalog and caching mechanisms do not account for
+      future localization needs
+
+1.3  Goals
+
+    So then, the changes proposed within this document have the
+    following goals:
+
+    - unification of the server and client catalog format and code
+
+    - simplification of catalog update and retrieval mechanisms
+
+    - improved granularity and transparency in caching mechanisms
+      allowing operations to only retrieve the information they need
+
+    - reduction of resource requirements and processing time for server
+      and client
+
+    - increase of available metadata before long-running package
+      operations to enable improved progress and user feedback
+
+    - improved extensibility of the catalog depot operation and the
+      caching mechanisms used by the client
+
+    - unification and implementation of caching mechanisms and code
+      for client and server
+
+2.  Proposed Changes
+
+    The changes needed to accomplish the goals listed in section 1.3
+    are grouped below by the type of change.  It should be noted that
+    what is described in this document is dependent on an upcoming image
+    and repository format versioning proposal since these changes will
+    require a change to the structure of both images and repositories.
+
+2.1 Catalog Format Changes
+
+2.1.1  Current Catalog Format
+
+    To better understand the proposed changes, it may first be helpful
+    to understand the current catalog format and how it is composed.
+    Currently, the catalog could be viewed as being composed of three
+    files:
+
+    - attrs
+
+        The attrs file contains metadata about the catalog.  The
+        server and client attrs file are text/plain, and currently
+        have the following content:
+
+         S Last-Modified: 2009-06-23T07:58:35.686485
+         S prefix: CRSV
+         S npkgs: 40802
+
+        The client adds this content:
+         S origin: <repository_uri>
+
+        The Last-Modified value is an ISO-8601 date and time in server
+        local time (not UTC).
+
+    - catalog
+
+        The server catalog file currently contains entries of this
+        format:
+
+        <type> <fmri><newline>
+
+        Where type can be 'V' (version), 'C' (critical; not used), or
+        'R' (renamed).
+
+        As a special exception, the format of 'R' entries is:
+
+        R <src_stem> <src_version> <dest_stem> <dest_version><newline>
+
+        If a destination package is not provided for 'R', then 'NULL'
+        is used for the destination values.
+
+        Examples:
+
+        C pkg:/[email protected],5.11-0.111:20090507T161015Z
+        V pkg:/[email protected],5.11-0.111:20090508T161015Z
+        R foo 1.0:20090508T161015Z bar 1.0:20090509T161015Z
+        R baz 1.0:20090508T161015Z NULL NULL
+
+        The client catalog file contains entries of this format:
+
+        <type> pkg <fmri_stem> <fmri_version><newline>
+
+        As a special exception, the format of 'R' entries is:
+
+        R <src_stem> <src_version> <dest_stem> <dest_version><newline>
+
+        If a destination package is not provided for 'R', then 'NULL'
+        is used for the destination values.
+
+        Example:
+
+        V pkg foo 0.5.11,5.11-0.111:20090508T161015Z
+
+    - update log
+
+        While not strictly a part of the catalog, the update logs serve
+        as a record of changes to the catalog allowing clients to obtain
+        incremental updates to a catalog instead of retrieving the
+        entire catalog each time.
+
+        It only exists on the server, and contains entries of this
+        format:
+
+        <update_type><type><space><fmri><newline>
+
+        Where 'update_type' can be '+' (there were comments at one
+        time referring to a '-' operation, but they were removed and
+        the code for it was never implemented).
+
+        Where 'type' can be 'V' (version), 'C' (critical; not used),
+        or 'R' (renamed).
+
+        As a special exception, the format of 'R' entries is:
+
+        R <src_stem> <src_version> <dest_stem> <dest_version><newline>
+
+2.1.2  Proposed Catalog Format
+
+    To accomplish the goals listed in section 1.3, a new catalog
+    format will be adopted.  This format will be used by the client
+    to store catalog data locally, regardless of the format used by
+    the repository (e.g. the repository only provides older catalog
+    format).  All data is assumed to be encodable using Unicode as
+    the JSON format specification requires this.
+
+    The new catalog format splits the contents of the catalog into
+    multiple parts, per-locale, but treats them as a unified set.
+    That is, all of the parts have a common base, but can easily be
+    merged at load time if access to multiple parts is needed.
+
+    The catalog will be composed of the following files:
+
+    - catalog.attrs
+        This file will contain a python dict structure serialized in
+        JSON (JavaScript Object Notation) format.  The metadata within
+        is used to describe the catalog and its contents using the
+        following attributes:
+
+        _SIGNATURE:
+            An optional dict of digest and/or cryptographic values which
+            can be used by clients to verify the integrity of the
+            catalog.attrs data.  Each key should represent the name of
+            the signature or digest used, and each value the signature
+            itself.
+
+        created:
+            The value is an ISO-8601 'basic format' date in UTC time
+            indicating when the catalog was created.  This value is
+            provided by the server.
+
+        last-modified:
+            The value is an ISO-8601 'basic format' date in UTC time
+            indicating when the catalog was last updated.  This value
+            is provided by the server.
+
+        package-count:
+            An integer value indicating the total number of unique
+            FMRI stems in the catalog.
+
+        package-version-count:
+            An integer value indicating the total number of unique
+            FMRI versions in the catalog.
+
+        parts:
+            A dict of available catalog parts.  This is to enable
+            clients to quickly determine what locale-specific catalog
+            data might be available to them.  Each entry contains the
+            date and time a part was created and last modified.  It
+            may also contain digest signature entries for the part (if
+            available) so that clients can verify parts after applying
+            incremental updates.
+
+        updates:
+            A dict of available catalog updates.  Each entry corresponds
+            to the filename of an update log named after the time the
+            update occurred using an ISO-8601 'reduced accuracy basic
+            format'.  Each entry also contains a last-modified date in
+            ISO-8601 basic format to allow clients to determine when an
+            update log was last changed without checking the repository.
+
+        version:
+            An integer value representing the version of the structure
+            used within the attrs, update log, and catalog part files.
+
+        Example:
+
+        {
+            "_SIGNATURE": {
+              "sha-1": "8f5c22fd8218f7a0982d3e3fdd01e40671cb9cab"
+            },
+            "created": "20050614T080000.234231Z",
+            "last-modified": "20090508T161025.686485Z",
+            "package-count": 40802,
+            "package-version-count": 1706,
+            "parts": {
+              "catalog.base.C": {
+                "last-modified": "20090508T161025.686485Z",
+                "signature-sha-1": "9b37ef267ae6aa8a31b878aad4e9baa234470d45",
+              },
+              "catalog.dependency.C": {
+                "last-modified": "20090508T161025.686485Z",
+                "signature-sha-1": "0c896321c59fd2cd4344fec074d55ba9c88f75e8",
+              },
+              "catalog.summary.C": {
+                "last-modified": "20090508T161025.686485Z",
+                "signature-sha-1": "b3a6ab53677c7b5f94c9bd551a484d57b54ed6f7",
+              },
+              "catalog.summary.FR": {
+                "last-modified": "20081002T080903.235212Z",
+                "signature-sha-1": "d2b6cb03677c725f94c9ba551a454d56b54ea2f8",
+              },
+            },
+            "updates": {
+              "update.20081002T08Z.C": {
+                "last-modified": "20081002T080903.235212Z",
+                "signature-sha-1": "a2b6cb03277c725a94c9ba551a454d56b54ea2f8",
+              },
+              "update.20090508T16Z.C": {
+                "last-modified": "20090508T161025.686485Z",
+                "signature-sha-1": "c2b6ca03473c725f94c8ba201a454d56b54ea2f8",
+              },
+            },
+            "version": 1,
+        }
+
+    - catalog.<part_name>.<locale_name>
+
+        Each part of the catalog will contain a python dict structure
+        serialized in JSON format.  <locale_name> is an OpenSolaris
+        system locale name, and should be 'C' if the file applies to
+        all locales.  Version entries for each package stem are kept
+        in ascending version order to allow fast lookups by the client
+        and avoid sort overhead on load.  Also note that any top-level
+        entry key in the structure starting with '_' will be treated
+        as metadata related to the catalog or version entry and must
+        be ignored unless the client can use it.
+
+        Finally, each catalog entry can also contain an optional set
+        of digest and signature key/value pairs that can be used to
+        verify the content of the related package manifest.  Clients
+        must ignore any key/value pairs that are unknown to them within
+        the structure.  The catalog structure can be described as
+        follows:
+
+        {
+          "<optional-signature-dict>": {
+            "<signature-name>": "<signature-value>",
+          },
+          "<publisher-prefix>": {
+            "<FMRI package stem>": [
+              {
+                "version": <FMRI version string>,
+                "<optional-actions>": <optional-actions-data>
+                "<optional-signature-name>": "<signature-value>",
+              }
+            ]
+          }
+        }
+
+    Initially, the server will offer the following catalog 'parts'.
+    Each has its content based on a tradeoff between memory usage,
+    load times, and bandwidth needs which depend on the client being
+    used to perform packaging operations or the operation being
+    performed.
+
+    - catalog.base.C
+        This catalog part contains the FMRIs of the packages that the
+        repository contains, and an optional digest value that can be
+        used for verifying the contents of retrieved manifests.  Loading
+        just this content is useful when performing basic listing opera-
+        tions using the cli, or when simply checking to see if a given
+        package FMRI is valid.  Note that since this information is com-
+        mon to all locales, this part of the catalog is only offered for
+        the 'C' locale.  An example of its contents is shown below:
+
+        {
+          "_SIGNATURE": {
+            "sha-1": "9b37ef267ae6aa8a31b878aad4e9baa234470d45",
+          },
+          "opensolaris.org":{
+            "SUNWipkg":[
+              {
+                "version":"0.5.11,5.11-0.117:20090623T135937Z",
+                "signature-sha-1": "596f26c4fc725b486faba089071d2b3b35482114"
+              },
+              {
+                "version":"0.5.11,5.11-0.118:20090707T220625Z",
+                "signature-sha-1": "ab6f26c4fc725b386faca089071d2b3d35482114"
+              }
+            ],
+            "SUNWsolnm":[
+              {
+                "version":"0.5.11,5.11-0.117:20090623T144046Z",
+                "signature-sha-1": "fe6f26c4fc124b386faca089071d2b3a35482114"
+              },
+              {
+                "version":"0.5.11,5.11-0.118:20090707T224654Z",
+                "signature-sha-1": "696f26c4fc124b386facb089071d2b3f35482114"
+              }
+            ]
+          }
+        }
+
+    - catalog.dependency.C
+        This catalog part contains the FMRIs of the packages that the
+        repository contains, any 'depend' actions, and any 'set' actions
+        for facets or variants.  This information is intended to be used
+        during dependency calculation by install, uninstall, etc.  It is
+        anticipated that package size summary information, and actions
+        for set pkg.renamed and pkg.obsolete will be stored in this part
+        as well when they become available.  Note that since this infor-
+        mation is common to all locales, this part of the catalog is
+        only offered for the 'C' locale.  An example of its contents is
+        shown below:
+
+        {
+          "_SIGNATURE": {
+            "sha-1": "0c896321c59fd2cd4344fec074d55ba9c88f75e8",
+          },
+          "opensolaris.org":{
+            "SUNWdvdrw":[
+              {
+                "version":"5.21.4.10.8,5.11-0.108:20090218T042840Z",
+                "actions":[
+                  "set name=variant.zone value=global value=nonglobal",
+                  "set name=variant.arch value=sparc value=i386",
+                  "depend [email protected] type=require",
+                  "depend [email protected] type=require",
+                  "depend [email protected] type=require"
+                ]
+              }
+            ],
+            "SUNWthunderbirdl10n-extra":[
+              {
+                "version":"0.5.11,5.11-0.75:20071114T205327Z",
+              }
+            ]
+          }
+        }
+
+    - catalog.summary.<locale_name>
+        This catalog part contains the FMRIs of the packages that the
+        repository contains and any 'set' actions (excluding those for
+        facets or variants).  This information is intended to be used
+        primarily by GUI clients such as packagemanager(1), or the BUI
+        (Browser UI) provided by pkg.depotd(1m) for quick, efficient
+        access to package metadata for listing.  An example is shown
+        below:
+
+        {
+          "_SIGNATURE": {
+            "sha-1": "b3a6ab53677c7b5f94c9bd551a484d57b54ed6f7",
+          },
+          "opensolaris.org":{
+            "SUNWdvdrw":[
+              {
+                "version":"5.21.4.10.8,5.11-0.108:20090218T042840Z",
+                "actions":[
+                  "set name=description value=\"DVD creation utilities\"",
+                  "set name=info.classification value=org.opensolaris.category.2008:System/Media",
+                ]
+              }
+            ],
+            "SUNWthunderbirdl10n-extra":[
+              {
+                "version":"0.5.11,5.11-0.75:20071114T205327Z",
+                "actions":[
+                  "set name=description value=\"Thunderbird localization - other 15 lang\"",
+                  "set name=srcpkgs value=SUNWthunderbirdl10n-extra"
+                ]
+              }
+            ]
+          }
+        }
+
+    - update.<logdate>.<locale_name>
+
+        This file will contain a python dict structure serialized in
+        JSON (JavaScript Object Notation) format.  Where <logdate> is
+        a UTC date and time in ISO-8601 'reduced accuracy basic
+        format'. <locale_name> is an OpenSolaris system locale name,
+        and should be 'C' if the update log applies to all locales.
+
+        The structure of catalog update files is similar to that
+        of catalog files, with a few exceptions.  First, each version
+        entry contains additional elements indicating the catalog
+        operation and the time of the operation.  Second, each entry
+        also contains a set of dicts keyed by catalog part name
+        indicating which catalog parts the package was added to.  The
+        contents of each of these dicts is the exact contents of the
+        package's catalog part entry (excluding version).
+
+        The supported types (<op-type> in the example below) of catalog
+        operations are:
+
+        'add'       Indicates that the corresponding FMRI and metadata
+                    (if present) has been added to the catalog.
+
+        'remove'    Indicates that the corresponding FMRI has been
+                    removed from the catalog.
+
+        The structure can be described as follows:
+
+        {
+          <optional-signature-or-signature-dict>: {
+            <signature-or-signature-name>: <signature-or-signature-value>,
+          },
+          <publisher-prefix>: {
+            <FMRI package stem>: [
+              {
+                "op-type": <type-of-operation>
+                "op-time": <ISO-8601 Date and Time>
+                "version": <FMRI version string>,
+                "<catalog.part.name>": <catalog-type-metadata>,
+              }
+            ]
+          }
+        }
+
+        An example update log might consist of the following:
+
+        {
+          "_SIGNATURE": {
+            "sha-1": "0c896321c59fd2cd4344fec074d55ba9c88f75e8",
+          },
+          "opensolaris.org":{
+            "SUNWthunderbirdl10n-extra":[
+              {
+                "op-type": "remove",
+                "op-time": "20090218T042838Z",
+                "version":"0.5.11,5.11-0.75:20071114T205327Z",
+              }
+            ],
+            "SUNWdvdrw":[
+              {
+                "op-type": "add",
+                "op-time": "20090524T042841Z",
+                "version":"5.21.4.10.8,5.11-0.111:20090524T042840Z",
+                "catalog.dependency.C": {
+                  "actions": [
+                    "depend [email protected] type=require",
+                    "depend [email protected] type=require",
+                    "depend [email protected] type=require"
+                  ],
+                },
+                "catalog.summary.C": {
+                  "actions": [
+                    "set name=description value=\"DVD creation utilities\"",
+                    "set name=info.classification value=org.opensolaris.category.2008:System/Media",
+                  ],
+                },
+                "signature-sha-1": "fe6f26c4fc124b386faca089071d2b3a35482114",
+              }
+            ]
+          }
+        }
+
+    Please note that the digest and cryptographic information is
+    optional since older repositories won't have the information and
+    some users of the depot software may choose to not provide it.
+    For a detailed discussion on the choice of data format and a
+    performance analysis, see section 3.
+
+2.2  Server Changes
+
+    To enable clients to retrieve the new catalog files and incremental
+    updates to them, the following changes will be made:
+
+    - The new catalog files will be stored in the <repo_dir>/catalog
+      directory using the filenames described in section 2.1.2.  Any
+      existing catalog files will be converted to the new format upon
+      load (using writable-root if present) and the old ones removed
+      (unless readonly operation is in effect).
+
+    - Operations that modify the catalog file will be changed to write
+      out all of the new catalogs only; the version 0 catalog will no
+      longer be stored or used.
+
+    - The depot server will be changed to offer an additional catalog
+      operation "/catalog/1/" which will be added to the output of the
+      "/versions/0/" operation as well.  It will provide a simple GET-
+      based HTTP/1.1 interface for retrieving catalog and update log
+      files from the server.  It will not require or use any headers
+      other than those normally present within a standard HTTP/1.1
+      transaction.  However, the client api will continue to provide
+      the uuid, intent, and user agent headers that are provided today
+      for the existing "/catalog/0/" operation.
+
+    - The existing "/catalog/0/" operation will continue to be offered
+      by the depot server for compatibility with older clients.
+
+    - The depot server will be changed to perform a simple sanity check
+      when starting to verify that the packages in the catalog are
+      physically present in the repository and that the catalog attrs
+      files match the catalog files.  Likewise, the update logs will
+      be checked to verify that they are valid for the catalogs.  If
+      any of these files are found to be not valid, a warning will be
+      logged and the catalog files rewritten (using writable-root if
+      applicable).  In addition, any of the corrections made will
+      result in corresponding update log entries so that incremental
+      updates will not be broken for existing clients.
+
+2.3 Client Changes
+
+    To take advantage of the new catalog format, and to improve the
+    performance of clients, a number of changes will need to be made
+    to the pkg.client.api and its supporting classes.  All of the
+    changes proposed here should be transparent to client api
+    consumers.
+
+2.3.1  Image Changes
+
+    - The image object, upon initialization, will remove the
+      /var/pkg/catalog directory and its contents if possible.
+      If this cannot be done (due to permissions), the client
+      will continue on.  If it can be removed, a new directory
+      named /var/pkg/publisher be created, and publisher objects
+      will be told to store and retrieve their metadata from it.
+
+    - Publisher objects will store their catalog data within the
+      directory <meta_root>/<prefix>/catalog/.
+
+    - Any functions contained within the image class for the
+      direct storage, retrieval, updating, etc. of publisher
+      metadata will be moved to the pkg.client.publisher and
+      Catalog classes.
+
+    - A new "Catalog" object reference will be added to the
+      image class, which will be used to allow the api access
+      to catalog metadata.  This object will allow callers to
+      ask for a specific set of catalog data for an operation
+      (where the allowed sets match the names of the catalogs
+      described in section 2.1.2).  The data will then be
+      retrieved and stored for usage by callers as needed.
+
+    - The existing catalog caching mechanism will be removed
+      completely as it has been superseded by the new catalog
+      format.
+
+    - For performance reasons, the client api will also store
+      versions of each of the catalogs proposed that only
+      contain entries for installed FMRIs to accelerate common
+      client functions such as info, list, uninstall, etc. This
+      change will also result in the obsoletion of the current
+      /var/pkg/state directory and /var/pkg/pkg/<stem>/<ver>/
+      installed files, which will be removed and converted
+      during the image initialization process if possible.
+
+    - All api functions will be changed to retrieve the catalog
+      data they need instead of depending upon api consumers to
+      do so.
+
+2.3.2  Catalog Retrieval and Update Changes
+
+    - If a repository only offers version 0 of the catalog format,
+      then the client API will retrieve it, but transform and store
+      the catalog in version 1 format using the times the server
+      provides.
+
+    - If version 1 catalog data is not available, the client api will
+      fallback to retrieving catalog metadata by retrieving package
+      manifests (as it does today).  This will be transparent to
+      clients.
+
+    - When checking for catalog updates, the api(s) will follow this
+      process for version 1 catalogs when determining if a full or
+      incremental update should be performed for each catalog in the
+      image:
+
+      * If the repository now offers a version 1 catalog, but did not
+        do so previously, a full catalog retrieval will be performed.
+
+      * A conditional retrieval of the catalog.attrs file will be
+        performed using the last-modified date contained within it.
+        If a 304 (or not modified status) is returned, then the
+        catalog will be skipped during the update process.
+
+      * The resulting file will then be loaded and the integrity of the
+        attrs file verified by omitting the '_SIGNATURE' portion of the
+        data structure and using the information that was present within
+        to verify the integrity of the attrs file.  If the integrity
+        check fails, a transport exception will be raised.
+
+      * If the attrs file was retrieved successfully, it will be checked
+        as follows:
+
+        - If the created date in the retrieved attrs file does not
+          match the stored attrs file, a full catalog retrieval will be
+          performed as the catalog has been rebuilt.  In addition, a
+          warning will be provided to the client that there may be
+          something wrong with the repository (packages may be missing,
+          etc.).
+
+        - If the created date matches, then the version in the new attrs
+          file will be compared to the original, if they do not match a
+          full catalog retrieval will be performed as the format of the
+          catalog has changed (unless the client is unable to parse that
+          format in which case an error will be raised).
+
+        - If the version was valid, then the last modified date in the
+          new catalog.attrs file will be compared to the original attrs
+          file.  If the original attrs date is newer, then a full
+          catalog retrieval will be performed and the user will be
+          warned that there may be something wrong with the repository
+          (packages may no longer be available, etc.).  If the last
+          modified date in the original attrs file is the same as the
+          new attrs file, then no updates are available and the catalog
+          will be skipped.  If the original attrs last modified date is
+          older than the new attrs last modified date, then the 'update-
+          logs' property will be checked to see if there are incremental
+          updates available.  
+
+        - If the update-logs property is empty, a full catalog retrieval
+          will be performed with the assumption that the repository has
+          intentionally discarded all of its incremental update
+          information.  If the oldest update log listed in the new attrs
+          file is newer than the last modified date of the original
+          attrs file, then this client has not performed an incremental
+          for a period long enough that the repository no longer offers
+          incremental updates for their version of the catalog, and a
+          full catalog retrieval will be performed.
+
+        - Finally, if all of the above was successful, the api will then
+          start the incremental update process.
+
+    - When attempting to determine what incremental catalog updates
+      for version 1 catalogs are available, and the repository offers
+      version 1 catalogs, the client api(s) will use the following
+      process:
+
+        * The modified date and time of the update log the client last
+          retrieved will be compared against the corresponding entry in
+          catalog.attrs.  If it has not been modified, the update log
+          will be skipped.  Otherwise it will be retrieved, and added
+          to the incremental update queue. This is necessary since
+          update logs are per-hour and a change may have occurred since
+          the last time the update log was retrieved.
+
+        * The api will then retrieve any remaining update logs listed in
+          the catalog.attrs file that have a <logdate> newer than the
+          last time the client's local copy of the catalog was updated.
+          Each will be added to the update queue after retrieval.
+
+        * Each update log file will then be loaded and verified by omitting
+          the '_SIGNATURE' portion of the structures and using the
+          information that was present within it to verify the integrity
+          of the update log.  If the integrity check fails, a transport
+          exception will be raised.
+
+    - When applying the queued catalog updates, the client api will
+      use this process for each update log:
+
+        * each corresponding catalog part present in the image will be
+          loaded, and then any update log entries newer than the last
+          modified date of the catalog (based on op-time) will be
+          applied to the catalog as dictated by op-type
+
+        * if at any point, an update log entry cannot be applied as
+          directed, then a full catalog retrieval will be forced, and
+          the user will be warned that something may be wrong with the
+          repository (missing packages, etc.)
+
+        * if the update log is the last in the queue for a given set of
+          catalogs, then all previous ones will be removed as they are
+          no longer needed
+
+    - When attempting to verify the integrity of a full catalog part
+      retrieval, the api will use this process:
+
+      * The catalog parts will be loaded into memory and the
+        '_SIGNATURE' portion of the data structure removed.
+
+      * The api will then check the catalog.attrs file for digest
+        and/or cryptographic information related to the catalog.
+        If the information is present, it will then be used to
+        verify the integrity of the retrieved catalog parts.
+
+3.  Appendix
+
+3.1  Overview
+
+    During the development of this proposal, a number of different
+    approaches to the storage and retrieval of catalog data were
+    considered.  Specifically, the following formats were considered
+    and/or evaluated:
+
+    - manifest
+        A pure "manifest-style" format similar to the existing package
+        manifest.
+
+    - JSON
+        The portable JavaScript Object Notation-based format.
+
+    Size and performance characteristics for each of these formats can
+    be found in section 3.3.
+
+3.2  Evaluations
+
+3.2.1  manifest-style format evaluation
+
+    Initially, the "manifest-style" format seemed promising from a
+    performance and disk footprint standpoint when compared to using
+    JSON.  A few variations of this format were attempted, and examples
+    of this can be seen below:
+
+    - variation 1
+        pkg://opensolaris.org/[email protected],5.11-0.86:20080422T230436Z [email protected],[email protected]
+
+    - variation 2
+        pkg://opensolaris.org/[email protected],5.11-0.86:20080422T230436Z
+        depend fmri=pkg:/[email protected] type=require
+        depend fmri=pkg:/[email protected] type=require
+
+    - variation 3
+        After realising that variant and facet information was needed,
+        and that additional attributes might need to be accounted for in
+        the future, variation 3 was chosen for evaluation.
+
+        pkg://opensolaris.org/[email protected],5.11-0.106:20090131T184044Z
+        set name=variant.zone value=global value=nonglobal
+        set name=variant.arch value=sparc value=i386
+        depend [email protected] type=require
+        depend [email protected] type=require
+
+3.2.2  JSON format evaluation
+
+    When first evaluating JSON, results on x86-based systems were very
+    comparable or significantly better than the manifest-based format
+    from both a file size and performance perspective.  The following
+    structural variations were evaluated:
+
+    - variation 1
+        Variation one attempted to combine the catalog and attrs files,
+        but this approach was abandoned for simplicity and performance
+        reasons in later variations.
+
+    {
+        "attributes": {
+            "id": "556599b2-aae8-4e67-94b3-c58a07dbd91b",
+            "last-modified": "2009-05-08T16:10:25.686485",
+            "locale": "C",
+            "package-count": 40802,
+            "version": 1,
+        },
+        "packages": {
+            "SUNWipkg": {
+                "publisher": "opensolaris.org",
+            "versions": [
+                    "0.5.11,5.11-0.111:20090331T083235Z",
+                    "0.5.11,5.11-0.111:20090418T191601Z",
+                    "0.5.11,5.11-0.111:20090508T161025Z",
+                ],
+            },
+        },
+    } 
+
+    - variation 2
+    {
+      "packages":{
+        "SUNWlang-cs-extra":{
+          "publisher":"opensolaris.org",
+          "versions":[
+            [
+              "0.5.11,5.11-0.86:20080422T230436Z",
+              {
+                "depend":{
+                  "require":[
+                    {
+                      "fmri":"foo"
+                    },
+                    {
+                      "fmri":"bar"
+                    }
+                  ],
+                  "optional":[
+                    {
+                      "fmri":"baz"
+                    },
+                    {
+                      "fmri":"quux"
+                    }
+                  ]
+                }
+              }
+            ],
+          ]
+        }
+      }
+    }
+
+    - variation 3
+        This variation was attempted due to extreme performance issues
+        that were seen on some lower-memory bandwidth SPARC systems
+        when writing JSON files.  It was discovered that the simplejson
+        library uses a recursive call structure for iterative encoding
+        of python data structures and this does not perform well on many
+        SPARC systems.
+        
+        By changing the structure to a list of lists, a decrease in
+        write times of 20-30 seconds was realised.  However, this was
+        less than desirable as it meant the resulting data structure
+        would have to be significantly transformed after load for use
+        by the package system.
+
+    [['pkg://opensolaris.org/[email protected],5.11-0.111:20090508T163711Z'],
+    ['pkg://opensolaris.org/[email protected],5.11-0.99:20081002T152038Z',
+      [['require',
+        ['[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]',
+         '[email protected]']]]]
+    ]
+
+    - variation 4
+        This variation was struck upon after the failure of the last
+        with the attempt to have a data structure that was immediately
+        useful to the packaging system after load:
+
+    {
+      'opensolaris.org': {
+        'SUNWsongbird': [
+          {
+            'depend': {
+              'require': [
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]',
+                '[email protected]'
+              ]
+            },
+            'version': '0.5.11,5.11-0.99:20081002T152038Z'
+          },
+        ],
+        'SUNWstc': [
+          {
+            'version': '0.5.11,5.11-0.106:20090131T191239Z'
+          },
+        ],
+      },
+    }
+
+    - variation 5
+        The final variation is what was chosen for final evaluation for
+        JSON after discussions with other team members centered around
+        a key point: that the catalog is essentially an action pipeline
+        for the client.  In addition, the prior variations were either
+        hampered by poor serialization performance on SPARC systems or
+        lacked the extensibility needed for possible future attribute
+        additions to actions.
+        
+        {
+          "opensolaris.org":{
+            "SUNWdvdrw":[
+              {
+                "version":"5.21.4.10.8,5.11-0.108:20090218T042840Z",
+                "actions":[
+                  "set name=description value=\"DVD creation utilities\"",
+                ]
+              }
+            ],
+          }
+        }
+
+3.2.3  Performance Analysis
+
+    While a performance analysis was done for each variation during the
+    evaluation process, only the results for the chosen variation are
+    shown here.  Analysis was performed using a dump of the /dev repo
+    for builds 118 and prior consisting of 42,565 unique FMRIs.
+
+    Each format that was evaluated presented unique challenges.  While
+    the manifest-style provided simplicity and familiarity, it became
+    increasingly apparent during testing that any code that was used
+    to parse and write it would have to be changed significantly each
+    time changes were made to any in-memory structures that were used
+    as the source.  In contrast, the JSON format made it easy to re-use
+    the in-memory python structure as the same format to be written to
+    disk.
+
+    The uncompressed and gzip-compressed sizes (provided because both
+    Apache and cherrypy are capable of gzip compressing requests) are shown
+    below for comparison.  Of special note is the 'all' catalog shown
+    below which was created to evaluate the feasibility of having a
+    single catalog that provided access to all commonly needed metadata
+    by combining the base, dependency, and summary catalogs proposed in
+    section 2.1.2.
+
+    =================================================================
+    Size Comparison
+    =================================================================
+    Catalog     Mfst. Sz.   JSON Sz.    Mfst. CSz.  JSON CSz.
+    -----------------------------------------------------------------
+    current     2.25  MiB   -           327 KiB     -
+    base        2.86  MiB   2.00  MiB   305 KiB     246 KiB
+    dependency  16.44 MiB   16.45 MiB   1.4 MiB     1.4 MiB
+    summary     7.58  MiB   7.36  MiB   483 KiB     475 KiB
+    all         21.16 MiB   21.47 MiB   1.6 MiB     1.6 MiB
+
+    The time needed to read and write each format is shown below for
+    comparison.  Several runs for each catalog were performed to verify
+    that the timings were consistent, and the load of each system was
+    checked to verify that timings were not skewed.
+
+    =================================================================
+    Base Catalog Timings
+    =================================================================
+    System          Mfst. Wr.   JSON Wr.    Mfst. Rd.   JSON Rd.
+    -----------------------------------------------------------------
+    mine            0.13s       0.41s       0.19s       0.05s
+    ipkg.sfbay      0.19s       0.58s       0.29s       0.08s
+    kodiak.eng      0.30s       0.99s       0.37s       0.08s
+    cuphead.sfbay   1.18s       3.41s       1.54s       0.33s
+    jurassic.eng    1.37s       3.77s       1.31s       0.46s
+    -----------------------------------------------------------------
+    Mean            0.63s       1.83s       0.74s       0.20s
+
+    =================================================================
+    Dependency Catalog Timings
+    =================================================================
+    System          Mfst. Wr.   JSON Wr.    Mfst. Rd.   JSON Rd.
+    -----------------------------------------------------------------
+    mine            0.42s       1.06s       1.13s       0.24s
+    ipkg.sfbay      0.98s       1.65s       1.70s       0.39s
+    kodiak.eng      0.91s       2.61s       2.22s       0.40s
+    cuphead.sfbay   6.05s       9.00s       8.57s       1.57s
+    jurassic.eng    3.87s       10.46s      6.48s       2.13s
+    -----------------------------------------------------------------
+    Mean            2.45s       4.96s       4.02s       0.95s
+
+    =================================================================
+    Summary Catalog Timings
+    =================================================================
+    System          Mfst. Wr.   JSON Wr.    Mfst. Rd.   JSON Rd.
+    -----------------------------------------------------------------
+    mine            0.16s       0.78s       0.58s       0.14s
+    ipkg.sfbay      0.33s       1.09s       0.86s       0.22s
+    kodiak.eng      0.35s       1.90s       1.10s       0.25s
+    cuphead.sfbay   2.02s       6.55s       4.41s       0.92s
+    jurassic.eng    1.35s       7.24s       3.34s       1.25s
+    -----------------------------------------------------------------
+    Mean            0.84s       3.51s       2.06s       0.56s
+
+    =================================================================
+    'all' Catalog Timings
+    =================================================================
+    System          Mfst. Wr.   JSON Wr.    Mfst. Rd.   JSON Rd.
+    -----------------------------------------------------------------
+    mine            0.51s       1.22s       1.48s       0.31s
+    ipkg.sfbay      1.22s       1.89s       2.30s       0.51s
+    kodiak.eng      1.09s       3.05s       2.93s       0.53s
+    cuphead.sfbay   7.35s       10.38s      11.15s      2.02s
+    jurassic.eng    4.57s       12.20s      8.28s       2.74s
+    -----------------------------------------------------------------
+    Mean            2.95s       5.75s       5.23s       1.22s
+
+    System Notes:
+    - 'mine' is an Intel Core 2 DUO E8600 with 8GiB RAM
+    - ipkg.sfbay is a dual Opteron 2218 with 16GiB RAM
+    - kodiak.eng is a SPARC64-VI box with 32GiB RAM
+    - cuphead.sfbay is an UltraSparc-T2 with 3GiB RAM
+      (likely ldom or zone)
+    - jurassic.eng is an UltraSPARC-III+ with 32GiB RAM
+
+    From the timings seen above, it should become apparent that JSON
+    serialization performance is, on average, noticeably slower when
+    compared to a simple manifest-style format.  In particular, this
+    is very noticeable on lower memory-bandwidth SPARC systems.
+    
+    It was discovered that the likely reason for poor serialization on
+    some SPARC systems is that simplejson uses a recursive function-
+    based iterative encoder that does not perform well on SPARC systems
+    (due to register windows?).
+
+    This is likely because the call stack depth for the encoder will
+    match that of any python structure that it encodes.  During the
+    evaluation of possible format variations, this resulted in a
+    hybrid approach that combined a python dict with a simple list
+    of actions with the hope that further improvements could be made
+    to simplejson at some future date.  Without this approach,
+    significant increases in write times were seen (20-30 seconds)
+    when using a pure dict-based structure.
+
+    Conversely though, JSON read performance, on average, is noticeably
+    faster compared to a manifest-style format.  In part, this is because
+    more work has to be performed to transform the manifest-style format
+    into an equivalent python data structure.  Notably, there is a large
+    cost to sorting package versions after load (having the version data
+    in ascending order is extremely useful to the client).
+
+    Finally, a comparison of the heap size overhead (defined as the
+    difference between the size of the heap before loading a catalog
+    and after as measured on my x86 system) is shown for comparison
+    below:
+
+    =================================================================
+    'Heap' Overhead Comparison
+    =================================================================
+    Catalog     Mfst. Sz.   JSON Sz.    Increase Sz.    Inc. %
+    -----------------------------------------------------------------
+    base        9.16  MiB   12.45 MiB   +3.29  MiB      +35.92%
+    dependency  32.34 MiB   51.48 MiB   +19.14 MiB      +59.19%
+    summary     16.84 MiB   27.41 MiB   +10.57 MiB      +62.76%
+    all         39.87 MiB   63.88 MiB   +24.01 MiB      +60.22%
+
+3.3  Conclusion
+
+    When comparing the numbers alone, it seems as though the manifest-
+    style format should have been chosen based solely on:
+
+    - lower memory usage (43.6% less than JSON on average)
+
+    - faster write times (1.71s on average compared to 4.01s on average
+      for JSON)
+
+    However, ultimately, the manifest-style format was rejected for
+    reasons beyond simple numbers:
+
+    - desire for a defined grammar and syntax
+
+    - required maintaining custom parsing and storage code
+
+    - not easily extensible such that if additional metadata
+      was needed that a protocol or file format revision might
+      be required
+
+    - when weighing read performance vs. write performance,
+      read performance was considered more important as updates
+      to the catalog will happen far less frequently than loads
+      of package data (loads took 3.01s on average for manifest-
+      style compared to 0.73s on average for JSON — about 75.75%
+      less time for JSON)
+
+    Instead, the JSON format was selected for the following reasons:
+
+    - full unicode support
+
+    - well-defined grammar and structure
+
+    - supported data types almost exactly mirror python's own
+      native data types
+
+    - allowed easy storage of existing action data, of which
+      catalogs are essentially a summarized view
+
+    - a python library for the parsing and writing of JSON is
+      part of python 2.6+
+
+    - JSON is easily portable to other systems and myriad
+      tools are available to parse and write it
+
+    - it is anticipated that the performance of simplejson will
+      only improve over time
+
+    As a final note, the approach of using separate catalogs for each
+    set of data instead of a single, merged catalog was used to reduce
+    memory usage and the amount of data that needs to be transferred
+    for clients.
+
--- a/doc/server_api_versions.txt	Thu Oct 22 19:13:06 2009 +0100
+++ b/doc/server_api_versions.txt	Fri Oct 23 17:43:37 2009 -0500
@@ -1,3 +1,22 @@
+Version 6:
+Incompatible with clients using versions 0-5.
+    CatalogInterface:
+        * get_matching_pattern_fmris() and get_matching_version_fmris()
+          now return a tuple of (fmris, unmatched).  Where 'fmris' is
+          a list of matching FMRIs, and 'unmatched' is a dict of
+          unmatched patterns or versions indexed by match criteria.
+
+        * package_count now returns the number of unique packages in the
+          catalog instead of the unique number of package versions.
+
+        * package_version_count, a new property, was added that contains
+          the number of unique package versions in the catalog.
+
+     ConfigInterface:
+        * get_repo_attrs was renamed to get_repo_properties
+
+        * get_repo_attr_value was renamed to get_repo_property_value
+
 Version 5:
 Compatible with clients using Versions 3-4.
     ConfigInterface.get_repo_attr_value() has changed as follows:
--- a/src/client.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/client.py	Fri Oct 23 17:43:37 2009 -0500
@@ -49,6 +49,7 @@
 import gettext
 import itertools
 import locale
+import logging
 import os
 import socket
 import sys
@@ -76,7 +77,7 @@
 from pkg.client.history import (RESULT_CANCELED, RESULT_FAILED_BAD_REQUEST,
     RESULT_FAILED_CONFIGURATION, RESULT_FAILED_TRANSPORT, RESULT_FAILED_UNKNOWN,
     RESULT_FAILED_OUTOFMEMORY)
-from pkg.misc import EmptyI, msg, emsg, PipeError
+from pkg.misc import EmptyI, msg, PipeError
 
 CLIENT_API_VERSION = 22
 PKG_CLIENT_NAME = "pkg"
@@ -85,6 +86,8 @@
 JUST_LEFT = -1
 JUST_RIGHT = 1
 
+logger = global_settings.logger
+
 valid_special_attrs = ["action.name", "action.key", "action.raw"]
 
 valid_special_prefixes = ["action."]
@@ -109,7 +112,7 @@
 
         # This has to be a constant value as we can't reliably get our actual
         # program name on all platforms.
-        emsg(ws + pkg_cmd + text_nows)
+        logger.error(ws + pkg_cmd + text_nows)
 
 def usage(usage_error=None, cmd=None, retcode=2, full=False):
         """Emit a usage message and optionally prefix it with a more
@@ -120,10 +123,10 @@
 
         if not full:
                 # The full usage message isn't desired.
-                emsg(_("Try `pkg --help or -?' for more information."))
+                logger.error(_("Try `pkg --help or -?' for more information."))
                 sys.exit(retcode)
 
-        emsg(_("""\
+        logger.error(_("""\
 Usage:
         pkg [options] command [cmd_options] [operands]
 
@@ -319,8 +322,8 @@
                                 pf = pfmri.get_name() + pub
 
                                 try:
-                                        ret = api_inst.info([pfmri], False,
-                                            info_needed)
+                                        ret = api_inst.info([pfmri],
+                                            not all_known, info_needed)
                                         pis = ret[api.ImageInterface.INFO_FOUND]
                                 except api_errors.ApiException, e:
                                         error(e)
@@ -335,18 +338,18 @@
 
                 if not found:
                         if not seen_one_pkg and not all_known:
-                                emsg(_("no packages installed"))
+                                logger.error(_("no packages installed"))
                                 img.history.operation_result = \
                                     history.RESULT_NOTHING_TO_DO
                                 return 1
 
                         if upgradable_only:
                                 if pargs:
-                                        emsg(_("No specified packages have " \
-                                            "available updates"))
+                                        logger.error(_("No specified packages "
+                                            "have available updates"))
                                 else:
-                                        emsg(_("No installed packages have " \
-                                            "available updates"))
+                                        logger.error(_("No installed packages "
+                                            "have available updates"))
                                 img.history.operation_result = \
                                     history.RESULT_NOTHING_TO_DO
                                 return 1
@@ -422,10 +425,11 @@
                 try:
                         success = img.repair(repairs, progresstracker)
                 except api_errors.RebootNeededOnLiveImageException:
-                        error(_("Requested \"fix\" operation would affect files that cannot be "
-                                "modified in live image.\n"
-                                "Please retry this operation on an alternate boot environment."))
-                	success = False
+                        error(_("Requested \"fix\" operation would affect "
+                            "files that cannot be modified in live image.\n"
+                            "Please retry this operation on an alternate boot "
+                            "environment."))
+                        success = False
 
                 if not success:
                         progresstracker.verify_done()
@@ -463,7 +467,7 @@
 
         if illegals:
                 for i in illegals:
-                        emsg(str(i))
+                        logger.error(str(i))
                 return 1
 
         any_errors = False
@@ -504,19 +508,19 @@
 
         if notfound:
                 if fmris:
-                        emsg()
-                emsg(_("""\
+                        logger.error("")
+                logger.error(_("""\
 pkg: no packages matching the following patterns you specified are
 installed on the system.\n"""))
                 for p in notfound:
-                        emsg("        %s" % p)
+                        logger.error("        %s" % p)
                 if fmris:
                         if any_errors:
                                 msg2 = "See above for\nverification failures."
                         else:
                                 msg2 = "No packages failed\nverification."
-                        emsg(_("\nAll other patterns matched installed "
-                            "packages.  %s" % msg2))
+                        logger.error(_("\nAll other patterns matched "
+                            "installed packages.  %s" % msg2))
                 any_errors = True
 
         if any_errors:
@@ -569,6 +573,9 @@
         except api_errors.ProblematicPermissionsIndexException, e:
                 error(str(e) + PROBLEMATIC_PERMISSIONS_ERROR_MESSAGE)
                 return False
+        except api_errors.ReadOnlyFileSystemException, e:
+                error(e)
+                raise
         except api_errors.PermissionsException, e:
                 # Prepend a newline because otherwise the exception will
                 # be printed on the same line as the spinner.
@@ -590,9 +597,6 @@
                 error(_("An unexpected error happened during " \
                     "%s: %s") % (operation, e))
                 raise
-        except api_errors.ReadOnlyFileSystemException, e:
-                error(e)
-                raise
         except Exception, e:
                 error(_("An unexpected error happened during " \
                     "%s: %s") % (operation, e))
@@ -632,7 +636,7 @@
 Cannot remove '%s' due to the following packages that depend on it:"""
                     ) % e[0])
                 for d in e[1]:
-                        emsg("  %s" % d)
+                        logger.error("  %s" % d)
                 return False
         if e_type == api_errors.CatalogRefreshException:
                 if display_catalog_failures(e) != 0:
@@ -1299,6 +1303,8 @@
                 if not display_license:
                         info_needed = api.PackageInfo.ALL_OPTIONS - \
                             frozenset([api.PackageInfo.LICENSES])
+                info_needed -= api.PackageInfo.ACTION_OPTIONS
+
                 try:
                         ret = api_inst.info(pargs, info_local, info_needed)
                 except api_errors.UnrecognizedOptionsToInfo, e:
@@ -1372,38 +1378,38 @@
         if notfound:
                 err = 1
                 if pis:
-                        emsg()
+                        logger.error("")
                 if info_local:
-                        emsg(_("""\
+                        logger.error(_("""\
 pkg: info: no packages matching the following patterns you specified are
 installed on the system.  Try specifying -r to query remotely:"""))
                 elif info_remote:
-                        emsg(_("""\
+                        logger.error(_("""\
 pkg: info: no packages matching the following patterns you specified were
 found in the catalog.  Try relaxing the patterns, refreshing, and/or
 examining the catalogs:"""))
-                emsg()
+                logger.error("")
                 for p in notfound:
-                        emsg("        %s" % p)
+                        logger.error("        %s" % p)
 
         if illegals:
                 err = 1
                 for i in illegals:
-                        emsg(str(i))
+                        logger.error(str(i))
 
         if multi_match:
                 err = 1
                 for pfmri, matches in multi_match:
                         error(_("'%s' matches multiple packages") % pfmri)
                         for k in matches:
-                                emsg("\t%s" % k)
+                                logger.error("\t%s" % k)
 
         if no_licenses:
                 err = 1
                 error(_("no license information could be found for the "
                     "following packages:"))
                 for pfmri in no_licenses:
-                        emsg("\t%s" % pfmri)
+                        logger.error("\t%s" % pfmri)
 
         return err
 
@@ -1608,9 +1614,9 @@
         """
 
         if reference is None:
-                reference=valid_special_attrs
+                reference = valid_special_attrs
         if prefixes is None:
-                prefixes=valid_special_prefixes
+                prefixes = valid_special_prefixes
         for a in attrs:
                 for p in prefixes:
                         if a.startswith(p) and not a in reference:
@@ -1685,7 +1691,7 @@
 
                 if illegals:
                         for i in illegals:
-                                emsg(i)
+                                logger.error(i)
                         img.history.operation_result = \
                             history.RESULT_FAILED_BAD_REQUEST
                         return 1
@@ -1795,19 +1801,19 @@
         if notfound:
                 err = 1
                 if fmris:
-                        emsg()
+                        logger.error("")
                 if local:
-                        emsg(_("""\
+                        logger.error(_("""\
 pkg: contents: no packages matching the following patterns you specified are
 installed on the system.  Try specifying -r to query remotely:"""))
                 elif remote:
-                        emsg(_("""\
+                        logger.error(_("""\
 pkg: contents: no packages matching the following patterns you specified were
 found in the catalog.  Try relaxing the patterns, refreshing, and/or
 examining the catalogs:"""))
-                emsg()
+                logger.error("")
                 for p in notfound:
-                        emsg("        %s" % p)
+                        logger.error("        %s" % p)
                 img.history.operation_result = history.RESULT_NOTHING_TO_DO
         else:
                 img.history.operation_result = history.RESULT_SUCCEEDED
@@ -1821,32 +1827,32 @@
             total)
         if cre.failed:
                 # This ensures that the text gets printed before the errors.
-                emsg(txt)
+                logger.error(txt)
         else:
                 msg(txt)
 
         for pub, err in cre.failed:
                 if isinstance(err, urllib2.HTTPError):
-                        emsg("   %s: %s - %s" % \
+                        logger.error("   %s: %s - %s" % \
                             (err.filename, err.code, err.msg))
                 elif isinstance(err, urllib2.URLError):
                         if err.args[0][0] == 8:
-                                emsg("    %s: %s" % \
+                                logger.error("    %s: %s" % \
                                     (urlparse.urlsplit(
                                         pub["origin"])[1].split(":")[0],
                                     err.args[0][1]))
                         else:
                                 if isinstance(err.args[0], socket.timeout):
-                                        emsg("    %s: %s" % \
+                                        logger.error("    %s: %s" % \
                                             (pub["origin"], "timeout"))
                                 else:
-                                        emsg("    %s: %s" % \
+                                        logger.error("    %s: %s" % \
                                             (pub["origin"], err.args[0][1]))
                 else:
-                        emsg("   ", err)
+                        logger.error("   ", err)
 
         if cre.message:
-                emsg(cre.message)
+                logger.error(cre.message)
 
         return succeeded
 
@@ -2212,7 +2218,7 @@
                                 retcode = 1
 
                         for e in c["errors"]:
-                                emsg("\n" + str(e) + "\n")
+                                logger.error("\n" + str(e) + "\n")
 
                         if c["valid"]:
                                 msg(_(" Cert. Effective Date:"),
@@ -2458,19 +2464,19 @@
                     refresh_allowed=refresh_catalogs, progtrack=progtrack)
         except OSError, e:
                 # Ensure messages are displayed after the spinner.
-                emsg("\n")
+                logger.error("\n")
                 error(_("cannot create image at %(image_dir)s: %(reason)s") %
                     { "image_dir": image_dir, "reason": e.args[1] },
                     cmd="image-create")
                 return 1
         except api_errors.PermissionsException, e:
                 # Ensure messages are displayed after the spinner.
-                emsg("")
+                logger.error("")
                 error(e, cmd="image-create")
                 return 1
         except api_errors.InvalidDepotResponseException, e:
                 # Ensure messages are displayed after the spinner.
-                emsg("\n")
+                logger.error("\n")
                 error(_("The URI '%(pub_url)s' does not appear to point to a "
                     "valid pkg server.\nPlease check the server's "
                     "address and client's network configuration."
@@ -2628,12 +2634,12 @@
         if not http_proxy and not https_proxy:
                 return
 
-        emsg(_("\nThe following proxy configuration is set in the"
+        logger.error(_("\nThe following proxy configuration is set in the"
             " environment:\n"))
         if http_proxy:
-                emsg(_("http_proxy: %s\n") % http_proxy)
+                logger.error(_("http_proxy: %s\n") % http_proxy)
         if https_proxy:
-                emsg(_("https_proxy: %s\n") % https_proxy)
+                logger.error(_("https_proxy: %s\n") % https_proxy)
 
 
 # To allow exception handler access to the image.
@@ -2845,20 +2851,21 @@
         except api_errors.TransportError, __e:
                 if __img:
                         __img.history.abort(RESULT_FAILED_TRANSPORT)
-                emsg(_("\nErrors were encountered while attempting to retrieve"
-                    " package or file data for\nthe requested operation."))
-                emsg(_("Details follow:\n\n%s") % __e)
+                logger.error(_("\nErrors were encountered while attempting "
+                    "to retrieve package or file data for\nthe requested "
+                    "operation."))
+                logger.error(_("Details follow:\n\n%s") % __e)
                 print_proxy_config()
                 __ret = 1
         except api_errors.InvalidDepotResponseException, __e:
                 if __img:
                         __img.history.abort(RESULT_FAILED_TRANSPORT)
-                emsg(_("\nUnable to contact a valid package depot. "
+                logger.error(_("\nUnable to contact a valid package depot. "
                     "This may be due to a problem with the server, "
                     "network misconfiguration, or an incorrect pkg client "
                     "configuration.  Please check your network settings and "
                     "attempt to contact the server using a web browser."))
-                emsg(_("\nAdditional details:\n\n%s") % __e)
+                logger.error(_("\nAdditional details:\n\n%s") % __e)
                 print_proxy_config()
                 __ret = 1
         except history.HistoryLoadException, __e:
@@ -2928,4 +2935,5 @@
         misc.setlocale(locale.LC_ALL, "", error)
         gettext.install("pkg", "/usr/share/locale")
         __retval = handle_errors(main_func)
+        logging.shutdown()
         sys.exit(__retval)
--- a/src/depot.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/depot.py	Fri Oct 23 17:43:37 2009 -0500
@@ -35,14 +35,10 @@
 # particular--such that the pkg(1) pull client can operately accurately with
 # only a basic HTTP/HTTPS server in place.
 
-# XXX We should support simple "last-modified" operations via HEAD queries.
-
 # XXX Although we pushed the evaluation of next-version, etc. to the pull
 # client, we should probably provide a query API to do same on the server, for
 # dumb clients (like a notification service).
 
-# The default authority for the depot.
-AUTH_DEFAULT = "opensolaris.org"
 # The default repository path.
 REPO_PATH_DEFAULT = "/var/pkg/repo"
 # The default path for static and other web content.
@@ -100,15 +96,15 @@
         sys.exit(2)
 
 from pkg.misc import port_available, msg, emsg, setlocale
+import pkg.client.api_errors as api_errors
 import pkg.portable.util as os_util
 import pkg.search_errors as search_errors
-import pkg.server.catalog as catalog
-import pkg.server.config as config
-import pkg.server.depot as depot
+import pkg.server.depot as ds
 import pkg.server.depotresponse as dr
-import pkg.server.errors as errors
+import pkg.server.repository as sr
 import pkg.server.repositoryconfig as rc
 
+
 class LogSink(object):
         """This is a dummy object that we can use to discard log entries
         without relying on non-portable interfaces such as /dev/null."""
@@ -121,14 +117,17 @@
                 """Discard the bits."""
                 pass
 
+
 def usage(text):
         if text:
                 emsg(text)
 
         print """\
 Usage: /usr/lib/pkg.depotd [-d repo_dir] [-p port] [-s threads]
-           [-t socket_timeout] [--cfg-file] [--content-root] [--debug]
-           [--log-access dest] [--log-errors dest] [--mirror] [--nasty]
+           [-t socket_timeout] [--cfg-file] [--content-root]
+           [--disable-ops op[/1][,...]] [--debug] [--log-access dest]
+           [--log-errors dest] [--mirror] [--nasty]
+           [--set-property <section.property>=<value>]
            [--proxy-base url] [--readonly] [--rebuild] [--ssl-cert-file]
            [--ssl-dialog] [--ssl-key-file] [--writable-root dir]
 
@@ -138,6 +137,11 @@
                         the static and other web content used by the depot's
                         browser user interface.  The default value is
                         '/usr/share/lib/pkg'.
+        --disable-ops   A comma separated list of operations that the depot
+                        should not configure.  If, for example, you wanted
+                        to omit loading search v1, 'search/1' should be
+                        provided as an argument, or to disable all search
+                        operations, simply 'search'.
         --debug         The name of a debug feature to enable; or a whitespace
                         or comma separated list of features to enable.  Possible
                         values are: headers.
@@ -164,6 +168,10 @@
                         Cannot be used with --mirror or --rebuild.
         --rebuild       Re-build the catalog from pkgs in depot.  Cannot be
                         used with --mirror or --readonly.
+        --set-property  Used to specify initial repository configuration
+                        property values or to update existing ones; can
+                        be specified multiple times.  If used with --readonly
+                        this acts as a temporary override.
         --ssl-cert-file The absolute pathname to a PEM-encoded Certificate file.
                         This option must be used with --ssl-key-file.  Usage of
                         this option will cause the depot to only respond to SSL
@@ -198,6 +206,7 @@
         debug_features = {
             "headers": False,
         }
+        disable_ops = {}
         port = PORT_DEFAULT
         port_provided = False
         threads = THREADS_DEFAULT
@@ -243,11 +252,13 @@
                 log_routes["access"] = "stdout"
 
         opt = None
+        repo_props = {}
         try:
-                long_opts = ["cfg-file=", "content-root=", "debug=", "mirror",
-                    "nasty=", "proxy-base=", "readonly", "rebuild",
-                    "refresh-index", "ssl-cert-file=", "ssl-dialog=",
-                    "ssl-key-file=", "writable-root="]
+                long_opts = ["cfg-file=", "content-root=", "debug=",
+                    "disable-ops=", "mirror", "nasty=", "set-property=",
+                    "proxy-base=", "readonly", "rebuild", "refresh-index",
+                    "ssl-cert-file=", "ssl-dialog=", "ssl-key-file=",
+                    "writable-root="]
                 for opt in log_opts:
                         long_opts.append("%s=" % opt.lstrip('--'))
                 opts, pargs = getopt.getopt(sys.argv[1:], "d:np:s:t:",
@@ -295,6 +306,27 @@
                                                     "Invalid debug feature: " \
                                                     "%s." % f
                                         debug_features[f] = True
+                        elif opt == "--disable-ops":
+                                if arg is None or arg == "":
+                                        raise OptionError, \
+                                            "An argument must be specified."
+
+                                disableops = arg.split(",")
+                                for s in disableops:
+                                        if "/" in s:
+                                                op, ver = s.rsplit("/", 1)
+                                        else:
+                                                op = s
+                                                ver = "*"
+
+                                        if op not in \
+                                            ds.DepotHTTP.REPO_OPS_DEFAULT:
+                                                raise OptionError(
+                                                    "Invalid operation "
+                                                    "'%s'." % s)
+
+                                        disable_ops.setdefault(op, [])
+                                        disable_ops[op].append(ver)
                         elif opt in log_opts:
                                 if arg is None or arg == "":
                                         raise OptionError, \
@@ -316,6 +348,16 @@
                                             "for nasty option.\n Please " \
                                             "choose a value between 1 and 100."
                                 nasty = True
+                        elif opt == "--set-property":
+                                try:
+                                        prop, p_value = arg.split("=", 1)
+                                        p_sec, p_name = prop.split(".", 1)
+                                except ValueError:
+                                        usage(_("property arguments must be of "
+                                            "the form '<section.property>="
+                                            "<value>'."))
+                                repo_props.setdefault(p_sec, {})
+                                repo_props[p_sec][p_name] = p_value
                         elif opt == "--proxy-base":
                                 # Attempt to decompose the url provided into
                                 # its base parts.  This is done so we can
@@ -461,33 +503,6 @@
                 # Not applicable for reindexing operations.
                 content_root = None
 
-        fork_allowed = not reindex
-                
-        if nasty:
-                scfg = config.NastySvrConfig(repo_path, content_root,
-                    AUTH_DEFAULT, auto_create=not readonly,
-                    fork_allowed=fork_allowed, writable_root=writable_root)
-                scfg.set_nasty(nasty_value)
-        else:
-                scfg = config.SvrConfig(repo_path, content_root, AUTH_DEFAULT,
-                    auto_create=not readonly, fork_allowed=fork_allowed,
-                    writable_root=writable_root)
-
-        if readonly:
-                scfg.set_read_only()
-
-        if mirror:
-                scfg.set_mirror()
-
-
-        try:
-                scfg.init_dirs()
-        except (errors.SvrConfigError, EnvironmentError), _e:
-                print "pkg.depotd: an error occurred while trying to " \
-                    "initialize the depot repository directory " \
-                    "structures:\n%s" % _e
-                sys.exit(1)
-
         key_data = None
         if not reindex and ssl_cert_file and ssl_key_file and \
             ssl_dialog != "builtin":
@@ -597,22 +612,64 @@
                         # Since we've replaced cherrypy's log handler with our
                         # own, we don't want the output directed to a file.
                         dest = ""
-
                 gconf[log_type_map[log_type]["param"]] = dest
 
         cherrypy.config.update(gconf)
 
         # Now that our logging, etc. has been setup, it's safe to perform any
         # remaining preparation.
+
+        # Initialize repository state.
+        fork_allowed = not reindex
+        try:
+                repo = sr.Repository(auto_create=not readonly,
+                    cfgpathname=repo_config_file, fork_allowed=fork_allowed,
+                    log_obj=cherrypy, mirror=mirror, properties=repo_props,
+                    read_only=readonly, repo_root=repo_path,
+                    writable_root=writable_root)
+        except sr.RepositoryError, _e:
+                emsg("pkg.depotd: %s" % _e)
+                sys.exit(1)
+        except rc.RequiredPropertyValueError, _e:
+                emsg("pkg.depotd: repository configuration error: %s" % _e)
+                emsg("Please use the --set-property option to provide a value, "
+                    "or update the cfg_cache file for the repository to "
+                    "correct this.")
+                sys.exit(1)
+        except rc.PropertyError, _e:
+                emsg("pkg.depotd: repository configuration error: %s" % _e)
+                sys.exit(1)
+        except (search_errors.IndexingException,
+            api_errors.PermissionsException), _e:
+                emsg(str(_e), "INDEX")
+                sys.exit(1)
+
         if reindex:
+                # Initializing the repository above updated search indices
+                # as needed; nothing left to do, so exit.
+                sys.exit(0)
+
+        if nasty:
+                repo.cfg.set_nasty(nasty_value)
+
+        if rebuild:
                 try:
-                        scfg.acquire_catalog(rebuild=False, verbose=True)
+                        repo.rebuild()
+                except sr.RepositoryError, e:
+                        emsg(str(e), "REBUILD")
+                        sys.exit(1)
                 except (search_errors.IndexingException,
-                    catalog.CatalogPermissionsException,
-                    errors.SvrConfigError), e:
-                        cherrypy.log(str(e), "INDEX")
+                    api_errors.PermissionsException), e:
+                        emsg(str(e), "INDEX")
                         sys.exit(1)
-                sys.exit(0)
+
+        # Next, initialize depot.
+        if nasty:
+                depot = ds.NastyDepotHTTP(repo, content_root,
+                    disable_ops=disable_ops)
+        else:
+                depot = ds.DepotHTTP(repo, content_root,
+                    disable_ops=disable_ops)
 
         # Now build our site configuration.
         conf = {
@@ -624,7 +681,7 @@
             },
             "/robots.txt": {
                 "tools.staticfile.on": True,
-                "tools.staticfile.filename": os.path.join(scfg.web_root,
+                "tools.staticfile.filename": os.path.join(depot.web_root,
                     "robots.txt")
             },
         }
@@ -647,25 +704,8 @@
                 for entry in proxy_conf:
                         conf["/"][entry] = proxy_conf[entry]
 
-        scfg.acquire_in_flight()
         try:
-                scfg.acquire_catalog(rebuild=rebuild, verbose=True)
-        except (catalog.CatalogPermissionsException, errors.SvrConfigError), _e:
-                emsg("pkg.depotd: %s" % _e)
-                sys.exit(1)
-
-        try:
-                if nasty:
-                        root = cherrypy.Application(depot.NastyDepotHTTP(scfg,
-                            repo_config_file))
-                else:
-                        root = cherrypy.Application(depot.DepotHTTP(scfg,
-                            repo_config_file))
-        except rc.InvalidAttributeValueError, _e:
-                emsg("pkg.depotd: repository.conf error: %s" % _e)
-                sys.exit(1)
-
-        try:
+                root = cherrypy.Application(depot)
                 cherrypy.quickstart(root, config=conf)
         except Exception, _e:
                 emsg("pkg.depotd: unknown error starting depot server, " \
--- a/src/man/pkg.depotd.1m.txt	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/man/pkg.depotd.1m.txt	Fri Oct 23 17:43:37 2009 -0500
@@ -7,14 +7,16 @@
 SYNOPSIS
      /usr/lib/pkg.depotd [-d repo_dir] [-p port] [-s threads]
          [-t socket_timeout] [--cfg-file] [--content-root] [--debug]
-         [--log-access] [--log-errors] [--mirror] [--proxy-base url]
-         [--readonly] [--rebuild] [--ssl-cert-file] [--ssl-dialog]
-         [--ssl-key-file] [--writable-root]
+         [--disable-ops=<op[/1]>[,...]] [--log-access] [--log-errors]
+         [--mirror] [--set-property <section.property>=<value> ...]
+         [--proxy-base url] [--readonly] [--rebuild] [--ssl-cert-file]
+         [--ssl-dialog] [--ssl-key-file] [--writable-root]
 
 DESCRIPTION
      pkg.depotd is the depot server for the image packaging system.
-     pkg(1), the retrieval client, makes requests of the depot server
-     for catalogs and packages.  pkgsend(1), the publication client,
+     It provides clients access to the data contained within a package
+     repository.  pkg(1), the retrieval client, makes requests of the depot
+     server for catalogs and packages.  pkgsend(1), the publication client,
      sends new versions of packages to the depot server.  pkg.depotd is
      typically run as a service on the system.  Package and software
      developers may wish to run private copies for various testing
@@ -152,8 +154,14 @@
      --cfg-file cfg_file        Overrides pkg/cfg_file with the value
                                 given by cfg_file.
 
-     --content-root root_dir    Overrides pkg/content_root with the value
-                                given by root_dir.
+     --content-root root_dir    Overrides pkg/content_root with the
+                                value given by root_dir.
+
+     --disable-ops              Select operations that should be
+                                disabled in the depot server.
+                                Operations are given as <operation>[
+                                /<version>] (e.g. catalog, catalog/1)
+                                in a comma-separated list.
 
      --debug features           Overrides pkg/debug with the value given
                                 by features.
@@ -164,30 +172,38 @@
      --log-errors dest          Overrides pkg/log_errors with the value
                                 given by dest.
 
-     --mirror                   Overrides pkg/mirror and sets it to be true.
+     --mirror                   Overrides pkg/mirror and sets it to be
+                                true.
+
+     --set-property             Used to specify initial repository
+                                configuration property values or to
+                                update existing ones; can be specified
+                                multiple times.  If used with --readonly
+                                this acts as a temporary override.
 
      --proxy-base url           Overrides pkg/proxy_base with the value
                                 given by url.  Ignored if empty value is
                                 provided.
 
-     --readonly                 Overrides pkg/readonly and sets it to be true.
+     --readonly                 Overrides pkg/readonly and sets it to be
+                                true.
 
      --rebuild                  Any existing repository catalog will be
                                 destroyed and then recreated on startup.
                                 This option may not be combined with the
                                 --mirror or --readonly options.
 
-     --ssl-cert-file source     Overrides pkg/ssl_cert_file with the value
-                                given by source.
+     --ssl-cert-file source     Overrides pkg/ssl_cert_file with the
+                                value given by source.
 
-     --ssl-dialog type          Overrides pkg/ssl_dialog with the value given
-                                by type.
+     --ssl-dialog type          Overrides pkg/ssl_dialog with the value
+                                given by type.
 
-     --ssl-key-file source      Overrides pkg/ssl_key_file with the value
-                                given by source.
+     --ssl-key-file source      Overrides pkg/ssl_key_file with the
+                                value given by source.
 
-     --writable-root path       Overrides pkg/writable_root with the value
-                                given by path.
+     --writable-root path       Overrides pkg/writable_root with the
+                                value given by path.
 
 EXAMPLES
      Example 1:  Enabling the depot server.
@@ -208,6 +224,98 @@
                                 may differ from the supplied default
                                 content.
 
+REPOSITORY CONFIGURATION PROPERTIES
+    A package repository has the following configuration properties.  They
+    are categorized by section depending on their usage by the repository
+    and can be set using the --set-property option.
+
+        Section     Property            Description
+        ==========  ==========          ===============
+        feed        description         A descriptive paragraph for the
+                                        feed.
+
+                    icon                A filename of a small image that
+                                        is used to visually represent
+                                        the feed.
+
+                    logo                A filename of a large image that
+                                        is used to visually brand or
+                                        identify the feed.
+
+                    name                A short, descriptive name for
+                                        RSS/Atom feeds generated by the
+                                        depot serving the repository.
+
+                    window              A numeric value representing the
+                                        number of hours, before the feed
+                                        for the repository was last
+                                        generated, to include when
+                                        generating the feed for the
+                                        repository.
+
+        publisher   alias               An alternative name for the
+                                        default publisher of the
+                                        packages in the repository.
+
+                    prefix              The name of the default
+                                        publisher for packages;
+                                        required.
+
+        repository  collection_type     A string value indicating the
+                                        type of packages in the
+                                        repository.  See the pydoc for
+                                        pkg.client.publisher.Repository
+                                        for details.
+
+                    description         A string value containing a
+                                        descriptive paragraph for the
+                                        repository.
+
+                    detailed_url        A comma-separated list of URIs
+                                        where more information about the
+                                        repository can be found.
+
+                    legal_uris          A comma-separated list of URIs
+                                        where licensing, legal, and
+                                        terms of service information
+                                        for the repository can be found.
+
+                    maintainer          A human readable string
+                                        describing the entity
+                                        maintaining the repository.  For
+                                        an individual, this string is
+                                        expected to be their name or
+                                        name and email.
+
+                    maintainer_url      A URI associated with the entity
+                                        maintaining the repository.
+
+                    mirrors             A comma-separated list of URIs
+                                        where package content can be
+                                        retrieved.
+
+                    name                A short, descriptive name for
+                                        the repository.
+
+                    origins             A comma-separated list of URIs
+                                        where package metadata can be
+                                        retrieved.
+
+                    refresh_seconds     An integer value indicating the
+                                        number of seconds clients should
+                                        wait before refreshing cached
+                                        repository catalog or repository
+                                        metadata information.
+
+                    registration_uri    A URI indicating a location
+                                        clients can use to register or
+                                        obtain credentials needed to
+                                        access the repository.
+
+                    related_uris        A comma-separated list of URIs
+                                        of related repositories that a
+                                        client may be interested in.
+
 EXIT STATUS
      The following exit values are returned:
 
--- a/src/man/pkgsend.1.txt	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/man/pkgsend.1.txt	Fri Oct 23 17:43:37 2009 -0500
@@ -8,6 +8,7 @@
      /usr/bin/pkgsend [options] command [cmd_options] [operands]
 
      /usr/bin/pkgsend create-repository
+         --set-property <section.property>=<value> ...
      /usr/bin/pkgsend open [-en] pkg_fmri
      /usr/bin/pkgsend add action arguments
      /usr/bin/pkgsend import [-T pattern] bundlefile ...
@@ -41,6 +42,11 @@
           not already exist.  Only supported for 'file://' repository
           locations at this time.
 
+          Initial repository configuration values can be provided
+          using the --set-property option.  It may also be specified
+          multiple times.  See pkg.depotd(1M) for a full list of
+          repository properties.
+
      open [-en] pkg_fmri
           Begin a transaction on the package and version specified by
           pkg_fmri.
@@ -213,7 +219,8 @@
      Example 2:  Create a new repository suitable for use with pkgsend
          or with a pkg(5) depot server.
 
-     $ pkgsend -s file:///tmp/example_repo create-repository
+     $ pkgsend -s file:///tmp/example_repo create-repository \
+         --set-property publisher.prefix=opensolaris.org
 
 EXIT STATUS
      The following exit values are returned:
--- a/src/modules/actions/attribute.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/actions/attribute.py	Fri Oct 23 17:43:37 2009 -0500
@@ -83,7 +83,7 @@
                             (self.name, self.attrs["name"], w, None)
                             for w in self.attrs["value"].split()
                         ]
-                elif self.attrs["name"] == "fmri":
+                elif self.attrs["name"] in ("fmri", "pkg.fmri"):
                         fmri_obj = fmri.PkgFmri(self.attrs["value"])
 
                         return [
--- a/src/modules/catalog.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/catalog.py	Fri Oct 23 17:43:37 2009 -0500
@@ -33,7 +33,6 @@
 import sha
 import simplejson as json
 import stat
-import tempfile
 import threading
 
 import pkg.actions
@@ -49,7 +48,7 @@
         """Private helper class used to serialize catalog data and generate
         signatures."""
 
-        def __init__(self, data, root=None, sign=True):
+        def __init__(self, data, pathname=None, sign=True):
                 self.__data = data
                 self.__fileobj = None
 
@@ -59,34 +58,22 @@
                         # sha-1 hex is always 40 characters in length.
                         self.__sha_1_keyword = "sha-1-" + ("*" * 34)
                 self.__sign = sign
-                self.__root = root
-                self.pathname = None
-
-                if root:
-                        # Create a file to store the data as it is written.
-                        # The caller is responsible for renaming this file
-                        # as desired after save().
-                        try:
-                                tmp_num, tmpfile = tempfile.mkstemp(
-                                    dir=root)
-                        except EnvironmentError, e:
-                                if e.errno == errno.EACCES:
-                                        raise api_errors.PermissionsException(
-                                            e.filename)
-                                raise
-
-                        try:
-                                # use fdopen since a filehandle exists
-                                tfile = os.fdopen(tmp_num, "wb")
-                        except EnvironmentError, e:
-                                portable.remove(tmpfile)
-                                if e.errno == errno.EACCES:
-                                        raise api_errors.PermissionsException(
-                                            e.filename)
-                                raise
-
-                        self.__fileobj = tfile
-                        self.pathname = tmpfile
+                self.pathname = pathname
+
+                if not pathname:
+                        return
+
+                try:
+                        tfile = open(pathname, "wb")
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise api_errors.PermissionsException(
+                                    e.filename)
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
+                        raise
+                self.__fileobj = tfile
 
         def __offsets(self):
                 if not self.__sign:
@@ -221,7 +208,7 @@
                 self.__meta_root = path
 
         def destroy(self):
-                """Removes any on-disk files that exist for the catalog and
+                """Removes any on-disk files that exist for the catalog part and
                 discards all content."""
 
                 if self.pathname:
@@ -229,10 +216,13 @@
                                 try:
                                         portable.remove(self.pathname)
                                 except EnvironmentError, e:
-                                        if e.errno != errno.EACCES:
-                                                raise
-                                        raise api_errors.PermissionsException(
-                                            e.filename)
+                                        if e.errno == errno.EACCES:
+                                                raise api_errors.PermissionsException(
+                                                    e.filename)
+                                        if e.errno == errno.EROFS:
+                                                raise api_errors.ReadOnlyFileSystemException(
+                                                    e.filename)
+                                        raise
                 self.signatures = {}
                 self.loaded = False
                 self.last_modified = None
@@ -258,9 +248,13 @@
                         if e.errno == errno.ENOENT:
                                 raise api_errors.RetrievalError(e,
                                     location=location)
-                        elif e.errno == errno.EACCES:
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
+                        if e.errno == errno.EACCES:
                                 raise api_errors.PermissionsException(
                                     e.filename)
+                        raise
 
                 try:
                         struct = json.load(fobj)
@@ -291,27 +285,29 @@
 
                 'data' must be a dict."""
 
-                f = _JSONWriter(data, root=self.meta_root, sign=self.sign)
+                f = _JSONWriter(data, pathname=self.pathname, sign=self.sign)
                 f.save()
 
                 # Update in-memory copy to reflect stored data.
                 self.signatures = f.signatures()
 
-                # Ensure the permissions on the new file are correct, and then
-                # rename it into place.
-                location = os.path.join(self.meta_root, self.name)
+                # Ensure the permissions on the new file are correct.
                 try:
-                        os.chmod(f.pathname, self.__file_mode)
-                        portable.rename(f.pathname, location)
-                except EnvironmentError:
-                        portable.remove(f.pathname)
+                        os.chmod(self.pathname, self.__file_mode)
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise api_errors.PermissionsException(
+                                    e.filename)
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
                         raise
 
                 # Finally, set the file times to match the last catalog change.
                 if self.last_modified:
                         mtime = calendar.timegm(
                             self.last_modified.utctimetuple())
-                        os.utime(location, (mtime, mtime))
+                        os.utime(self.pathname, (mtime, mtime))
 
         meta_root = property(__get_meta_root, __set_meta_root)
 
@@ -336,7 +332,7 @@
                 entries."""
 
                 self.load()
-                for pub in self.__data:
+                for pub in self.publishers():
                         for stem in self.__data[pub]:
                                 for entry in self.__data[pub][stem]:
                                         yield pub, stem, entry
@@ -406,7 +402,7 @@
 
                 versions = {}
                 entries = {}
-                for pub in self.__data:
+                for pub in self.publishers():
                         ver_list = self.__data[pub].get(name, ())
                         for entry in ver_list:
                                 sver = entry["version"]
@@ -451,7 +447,7 @@
 
                 versions = {}
                 entries = {}
-                for pub in self.__data:
+                for pub in self.publishers():
                         try:
                                 ver_list = self.__data[pub][name]
                         except KeyError:
@@ -498,7 +494,7 @@
                 self.load()
                 package_count = 0
                 package_version_count = 0
-                for pub in self.__data:
+                for pub in self.publishers():
                         for stem in self.__data[pub]:
                                 package_count += 1
                                 package_version_count += \
@@ -521,10 +517,21 @@
                 self.load()
                 return set((
                     stem
-                    for pub in self.__data
+                    for pub in self.publishers()
                     for stem in self.__data[pub]
                 ))
 
+        def publishers(self):
+                """A generator function that returns publisher prefixes as it
+                iterates over the package data in the CatalogPart."""
+
+                self.load()
+                for pub in self.__data:
+                        # Any entries starting with "__" are part of the
+                        # reserved catalog namespace.
+                        if not pub[:2] == "__":
+                                yield pub
+
         def remove(self, pfmri, op_time=None):
                 """Remove a package and its metadata."""
 
@@ -597,11 +604,30 @@
                         ver_list.sort(cmp=order)
                         return
 
-                for pub in self.__data:
+                for pub in self.publishers():
                         for stem in self.__data[pub]:
                                 ver_list = self.__data[pub][stem]
                                 ver_list.sort(cmp=order)
 
+        def tuples(self):
+                """A generator function that produces FMRI tuples as it iterates
+                over the contents of the catalog part."""
+
+                self.load()
+
+                # Results have to be sorted by stem first, and by
+                # publisher prefix second.
+                pkg_list = [
+                        "%s!%s" % (stem, pub)
+                        for pub in self.publishers()
+                        for stem in self.__data[pub]
+                ]
+
+                for entry in sorted(pkg_list):
+                        stem, pub = entry.split("!", 1)
+                        for entry in self.__data[pub][stem]:
+                                yield pub, stem, entry["version"]
+
         def validate(self, signatures=None):
                 """Verifies whether the signatures for the contents of the
                 CatalogPart match the specified signature data, or if not
@@ -631,11 +657,12 @@
         ADD = "add"
         REMOVE = "remove"
 
-        def __init__(self, name, meta_root=None):
+        def __init__(self, name, meta_root=None, sign=True):
                 """Initializes a CatalogUpdate object."""
 
                 self.__data = {}
-                CatalogPartBase.__init__(self, name, meta_root=meta_root)
+                CatalogPartBase.__init__(self, name, meta_root=meta_root,
+                    sign=sign)
 
         def add(self, pfmri, operation, op_time, metadata=None):
                 """Records the specified catalog operation and any related
@@ -695,6 +722,17 @@
                         return
                 self.__data = CatalogPartBase.load(self)
 
+        def publishers(self):
+                """A generator function that returns publisher prefixes as it
+                iterates over the package data in the CatalogUpdate."""
+
+                self.load()
+                for pub in self.__data:
+                        # Any entries starting with "__" are part of the
+                        # reserved catalog namespace.
+                        if not pub[:2] == "__":
+                                yield pub
+
         def save(self):
                 """Transform and store the catalog update's data in a file using
                 the pathname <self.meta_root>/<self.name>."""
@@ -735,7 +773,7 @@
                             publisher=pub)
                         return (pfmri, entry["op-type"], op_time, mdata)
 
-                for pub in self.__data:
+                for pub in self.publishers():
                         for stem in self.__data[pub]:
                                 for entry in self.__data[pub][stem]:
                                         yield get_update(pub, stem, entry)
@@ -933,8 +971,9 @@
         """A Catalog is the representation of the package FMRIs available from
         a package repository."""
 
-        # XXX It would be nice to include available tags and package sizes,
-        # although this could also be calculated from the set of manifests.
+        __BASE_PART = "catalog.base.C"
+        __DEPS_PART = "catalog.dependency.C"
+        __SUMM_PART_PFX = "catalog.summary"
 
         # The file mode to be used for all catalog files.
         __file_mode = stat.S_IRUSR|stat.S_IWUSR|stat.S_IRGRP|stat.S_IROTH
@@ -996,13 +1035,13 @@
                 of convenience for callers that wish to trade integrity checks
                 for improved catalog serialization performance."""
 
-                self.__log_updates = log_updates
                 self.__batch_mode = batch_mode
                 self.__manifest_cb = manifest_cb
                 self.__parts = {}
                 self.__updates = {}
 
                 # Must be set after the above.
+                self.log_updates = log_updates
                 self.meta_root = meta_root
                 self.read_only = read_only
                 self.sign = sign
@@ -1010,10 +1049,9 @@
                 # Must be set after the above.
                 self._attrs = CatalogAttrs(meta_root=self.meta_root, sign=sign)
 
-                if not read_only:
-                        # This lock is used to protect the catalog file from
-                        # multiple threads writing to it at the same time.
-                        self.__lock = threading.Lock()
+                # This lock is used to protect the catalog file from multiple
+                # threads writing to it at the same time.
+                self.__lock = threading.Lock()
 
                 # Must be done last.
                 self.__set_perms()
@@ -1025,7 +1063,7 @@
                 package_count = 0
                 package_version_count = 0
 
-                part = self.get_part("catalog.base.C", must_exist=True)
+                part = self.get_part(self.__BASE_PART, must_exist=True)
                 if part:
                         # If the base Catalog didn't exist (in-memory or on-
                         # disk) that implies there is nothing to sort and
@@ -1055,7 +1093,12 @@
             excludes=EmptyI):
                 # Note that the logic below must be kept in sync with
                 # group_actions found in add_package.
-                m = self.__manifest_cb(f)
+                m = self.__manifest_cb(self, f)
+                if not m:
+                        # If the manifest callback returns None, then
+                        # assume there is no action data to yield.
+                        return
+
                 if Catalog.DEPENDENCY in info_needed:
                         atypes = ("depend", "set")
                 elif Catalog.SUMMARY in info_needed:
@@ -1121,7 +1164,8 @@
 
                 # Next, if the update hasn't been cached,
                 # create an object for it.
-                ulog = CatalogUpdate(name, meta_root=self.meta_root)
+                ulog = CatalogUpdate(name, meta_root=self.meta_root,
+                    sign=self.__sign)
                 if self.meta_root and must_exist and not ulog.exists:
                         # Update doesn't exist on-disk,
                         # so don't return anything.
@@ -1156,7 +1200,7 @@
                 attrs = self._attrs
                 attrs.last_modified = op_time
 
-                if not self.__log_updates:
+                if not self.log_updates:
                         return
 
                 updates = {}
@@ -1192,7 +1236,7 @@
                 the catalog."""
 
                 attrs = self._attrs
-                if self.__log_updates:
+                if self.log_updates:
                         for name, ulog in self.__updates.iteritems():
                                 ulog.save()
 
@@ -1296,13 +1340,16 @@
                 self.__sign = value
 
                 # If the Catalog's sign property changes, the value of that
-                # property for its attributes and parts must be changed too.
+                # property for its attributes, etc. must be changed too.
                 if self._attrs:
                         self._attrs.sign = value
 
                 for part in self.__parts.values():
                         part.sign = value
 
+                for ulog in self.__updates.values():
+                        ulog.sign = value
+
         def __set_version(self, value):
                 self._attrs.version = value
 
@@ -1317,11 +1364,18 @@
                 """A generator function that produces tuples of the format
                 (fmri, actions) as it iterates over the contents of the
                 catalog (where 'actions' is a generator that returns the
-                Actions corresponding to the requested information).  If
-                the catalog doesn't contain any action data for the package
+                Actions corresponding to the requested information).
+                
+                If the catalog doesn't contain any action data for the package
                 entry, and manifest_cb was defined at Catalog creation time,
                 the action data will be lazy-loaded by the actions generator;
-                otherwise it will return an empty iterator.
+                otherwise it will return an empty iterator.  This means that
+                the manifest_cb will be executed even for packages that don't
+                actually have any actions corresponding to info_needed.  For
+                example, if a package doesn't have any dependencies, the
+                manifest_cb will still be executed.  This was considered a
+                reasonable compromise as packages are generally expected to
+                have DEPENDENCY and SUMMARY information.
 
                 'excludes' is a list of variants which will be used to determine
                 what should be allowed by the actions generator in addition to
@@ -1348,6 +1402,8 @@
                 assert info_needed
                 if not locales:
                         locales = set(("C",))
+                else:
+                        locales = set(locales)
 
                 for f, entry in self.entries(info_needed=info_needed,
                     locales=locales):
@@ -1430,7 +1486,7 @@
                         if manifest:
                                 for k, v in manifest.signatures.iteritems():
                                         entry["signature-%s" % k] = v
-                        part = self.get_part("catalog.base.C")
+                        part = self.get_part(self.__BASE_PART)
                         entries[part.name] = part.add(pfmri, metadata=entry,
                             op_time=op_time)
 
@@ -1468,7 +1524,7 @@
                 finally:
                         self.__unlock_catalog()
 
-        def append(self, src, cb=None, pfmri=None):
+        def append(self, src, cb=None, pfmri=None, pubs=EmptyI):
                 """Appends the entries in the specified 'src' catalog to that
                 of the current catalog.  The caller is responsible for ensuring
                 that no duplicates exist, and for calling finalize() or save()
@@ -1485,12 +1541,20 @@
                 BASE record.
 
                 'pfmri' is an optional FMRI of a package to append.  If not
-                provided, all FMRIs in the 'src' catalog will be appended."""
-
-                assert not self.__log_updates and not self.read_only
-
-                base = self.get_part("catalog.base.C")
-                src_base = src.get_part("catalog.base.C", must_exist=True)
+                provided, all FMRIs in the 'src' catalog will be appended.
+                This filtering is applied before any provided callback.
+
+                'pubs' is an optional list of publisher prefixes to restrict
+                the append operation to.  FMRIs that have a publisher not in
+                the list will be skipped.  This filtering is applied before
+                any provided callback.  If not provided, no publisher
+                filtering will be applied.
+                """
+
+                assert not self.log_updates and not self.read_only
+
+                base = self.get_part(self.__BASE_PART)
+                src_base = src.get_part(self.__BASE_PART, must_exist=True)
                 if not src_base:
                         if pfmri:
                                 raise api_errors.UnknownCatalogEntry(pfmri)
@@ -1516,6 +1580,9 @@
 
                 d = {}
                 for f, entry in entries:
+                        if pubs and f.publisher not in pubs:
+                                continue
+
                         nentry = copy.deepcopy(entry)
                         if cb:
                                 merge, mdata = cb(src, f, entry)
@@ -1541,7 +1608,7 @@
                 # Finally, merge any catalog part entries that exist unless the
                 # FMRI is found in the 'd'iscard dict.
                 for name in src.parts.keys():
-                        if name == "catalog.base.C":
+                        if name == self.__BASE_PART:
                                 continue
 
                         part = src.get_part(name, must_exist=True)
@@ -1562,6 +1629,8 @@
 
                         npart = self.get_part(name)
                         for f, entry in entries:
+                                if pubs and f.publisher not in pubs:
+                                        continue
                                 if f.publisher in d and \
                                     f.pkg_name in d[f.publisher] and \
                                     f.version in d[f.publisher][f.pkg_name]:
@@ -1599,13 +1668,22 @@
                                                 continue
 
                                         lm = old_parts[pname]["last-modified"]
-                                        if op_time > lm:
+                                        if op_time <= lm:
                                                 # Only add updates to the part
                                                 # that occurred after the last
                                                 # time it was originally
                                                 # modified.
+                                                continue
+
+                                        if op_type == CatalogUpdate.ADD:
                                                 part.add(pfmri, metadata=pdata,
                                                     op_time=op_time)
+                                        elif op_type == CatalogUpdate.REMOVE:
+                                                part.remove(pfmri,
+                                                    op_time=op_time)
+                                        else:
+                                                raise api_errors.UnknownUpdateType(
+                                                    op_type)
 
                 def apply_full(name):
                         src = os.path.join(path, name)
@@ -1617,7 +1695,12 @@
                         old_batch_mode = self.batch_mode
                         self.batch_mode = True
 
-                        for name in self.get_updates_needed(path):
+                        updates = self.get_updates_needed(path)
+                        if updates == None:
+                                # Nothing has changed, so nothing to do.
+                                return
+
+                        for name in updates:
                                 if name.startswith("update."):
                                         # The provided update is an incremental.
                                         apply_incremental(name)
@@ -1653,6 +1736,7 @@
                         self._attrs = CatalogAttrs(meta_root=self.meta_root)
                         self.__set_perms()
                 finally:
+                        self.batch_mode = old_batch_mode
                         self.__unlock_catalog()
 
         @property
@@ -1679,7 +1763,7 @@
                 self.__parts = {}
                 self.__updates = {}
 
-        def entries(self, info_needed=None, locales=None):
+        def entries(self, info_needed=EmptyI, locales=None):
                 """A generator function that produces tuples of the format
                 (fmri, metadata) as it iterates over the contents of the
                 catalog (where 'metadata' is a dict containing the requested
@@ -1693,7 +1777,7 @@
                                 signature data, if available, using key-value
                                 pairs of the form 'signature-<name>': value.
 
-                'info_needed' is an optional set of one or more catalog
+                'info_needed' is an optional list of one or more catalog
                 constants indicating the types of catalog data that will
                 be returned in 'metadata' in addition to the above:
 
@@ -1714,27 +1798,26 @@
                 Note that unlike actions(), catalog entries will not lazy-load
                 action data if it is missing from the catalog."""
 
-                base = self.get_part("catalog.base.C", must_exist=True)
+                base = self.get_part(self.__BASE_PART, must_exist=True)
                 if not base:
                         # Catalog contains nothing.
                         return
 
-                if not info_needed:
-                        info_needed = set()
                 if not locales:
                         locales = set(("C",))
+                else:
+                        locales = set(locales)
 
                 parts = []
                 if self.DEPENDENCY in info_needed:
-                        part = self.get_part("catalog.dependency.C",
-                            must_exist=True)
+                        part = self.get_part(self.__DEPS_PART, must_exist=True)
                         if part:
                                 parts.append(part)
 
                 if self.SUMMARY in info_needed:
                         for locale in locales:
                                 part = self.get_part(
-                                    "catalog.summary.%s" % locale,
+                                    "%s.%s" % (self.__SUMM_PART_PFX, locale),
                                     must_exist=True)
                                 if not part:
                                         # Data not available for this
@@ -1766,7 +1849,7 @@
                         merge_meta(f, mdata)
                         yield f, mdata
 
-        def entries_by_version(self, name, info_needed=None, locales=None):
+        def entries_by_version(self, name, info_needed=EmptyI, locales=None):
                 """A generator function that produces tuples of the format
                 (version, entries) as it iterates over the contents of the
                 the catalog, where entries is a list of tuples of the format
@@ -1781,7 +1864,7 @@
                                 signature data, if available, using key-value
                                 pairs of the form 'signature-<name>': value.
 
-                'info_needed' is an optional set of one or more catalog
+                'info_needed' is an optional list of one or more catalog
                 constants indicating the types of catalog data that will
                 be returned in 'metadata' in addition to the above:
 
@@ -1801,28 +1884,26 @@
                 should be returned.  The default is set(('C',)) if not provided.
                 """
 
-
-                base = self.get_part("catalog.base.C", must_exist=True)
+                base = self.get_part(self.__BASE_PART, must_exist=True)
                 if not base:
                         # Catalog contains nothing.
                         return
 
-                if not info_needed:
-                        info_needed = set()
                 if not locales:
                         locales = set(("C",))
+                else:
+                        locales = set(locales)
 
                 parts = []
                 if self.DEPENDENCY in info_needed:
-                        part = self.get_part("catalog.dependency.C",
-                            must_exist=True)
+                        part = self.get_part(self.__DEPS_PART, must_exist=True)
                         if part:
                                 parts.append(part)
 
                 if self.SUMMARY in info_needed:
                         for locale in locales:
                                 part = self.get_part(
-                                    "catalog.summary.%s" % locale,
+                                    "%s.%s" % (self.__SUMM_PART_PFX, locale),
                                     must_exist=True)
                                 if not part:
                                         # Data not available for this
@@ -1881,7 +1962,7 @@
                 'objects' is an optional boolean value indicating whether
                 FMRIs should be returned as FMRI objects or as strings."""
 
-                base = self.get_part("catalog.base.C", must_exist=True)
+                base = self.get_part(self.__BASE_PART, must_exist=True)
                 if not base:
                         # Catalog contains nothing.
 
@@ -1889,7 +1970,6 @@
                         # return no results properly to callers expecting
                         # a generator function.
                         return iter(())
-
                 return base.fmris(objects=objects)
 
         def fmris_by_version(self, name):
@@ -1897,7 +1977,7 @@
                 fmris), where fmris is a of the fmris related to the
                 version, for the given package name."""
 
-                base = self.get_part("catalog.base.C", must_exist=True)
+                base = self.get_part(self.__BASE_PART, must_exist=True)
                 if not base:
                         # Catalog contains nothing.
 
@@ -1905,10 +1985,9 @@
                         # return no results properly to callers expecting
                         # a generator function.
                         return iter(())
-
                 return base.fmris_by_version(name)
 
-        def get_entry(self, pfmri, info_needed=None, locales=None):
+        def get_entry(self, pfmri, info_needed=EmptyI, locales=None):
                 """Returns a dict containing the metadata for the specified
                 FMRI containing the requested information.
 
@@ -1920,7 +1999,7 @@
                                 signature data, if available, using key-value
                                 pairs of the form 'signature-<name>': value.
 
-                'info_needed' is an optional set of one or more catalog
+                'info_needed' is an optional list of one or more catalog
                 constants indicating the types of catalog data that will
                 be returned in 'metadata' in addition to the above:
 
@@ -1958,11 +2037,8 @@
                                         continue
                                 merge_entry(entry, meta)
 
-                if not info_needed:
-                        info_needed = set()
-
                 parts = []
-                base = self.get_part("catalog.base.C", must_exist=True)
+                base = self.get_part(self.__BASE_PART, must_exist=True)
                 if not base:
                         # Catalog contains nothing.
                         raise api_errors.UnknownCatalogEntry(
@@ -1970,6 +2046,8 @@
 
                 if not locales:
                         locales = set(("C",))
+                else:
+                        locales = set(locales)
 
                 # Always attempt to retrieve the BASE entry as FMRIs
                 # must be present in the BASE catalog part.
@@ -1978,7 +2056,7 @@
                 merge_entry(bentry, mdata)
 
                 if self.DEPENDENCY in info_needed:
-                        part = self.get_part("catalog.dependency.C",
+                        part = self.get_part(self.__DEPS_PART,
                             must_exist=True)
                         if part:
                                 parts.append(part)
@@ -1986,7 +2064,7 @@
                 if self.SUMMARY in info_needed:
                         for locale in locales:
                                 part = self.get_part(
-                                    "catalog.summary.%s" % locale,
+                                    "%s.%s" % (self.__SUMM_PART_PFX, locale),
                                     must_exist=True)
                                 if not part:
                                         # Data not available for this
@@ -2031,6 +2109,8 @@
                 assert info_needed
                 if not locales:
                         locales = set(("C",))
+                else:
+                        locales = set(locales)
 
                 entry = self.get_entry(pfmri, info_needed=info_needed,
                     locales=locales)
@@ -2069,6 +2149,20 @@
                                 continue
                         yield attr_name, a.attrs["value"]
 
+        def get_entry_signatures(self, pfmri):
+                """A generator function that yields tuples of the form (sig,
+                value) where 'sig' is the name of the signature, and 'value' is
+                the raw catalog value for the signature.  Please note that the
+                data type of 'value' is dependent on the signature, so it may
+                be a string, list, dict, etc."""
+
+                entry = self.get_entry(pfmri)
+                return (
+                    (k.split("signature-")[1], v)
+                    for k, v in entry.iteritems()
+                    if k.startswith("signature-")
+                )
+
         def get_entry_variants(self, pfmri, name):
                 """A generator function that returns the variants for the
                 specified variant name.  If no variants exist for the
@@ -2117,18 +2211,31 @@
                 """Returns a list of the catalog files needed to update
                 the existing catalog parts, based on the contents of the
                 catalog.attrs file in the directory indicated by 'path'.
-                """
+                A value of None will be returned if the catalog has
+                not been modified, while an empty list will be returned
+                if no catalog parts need to be updated, but the catalog
+                itself has changed."""
 
                 new_attrs = CatalogAttrs(meta_root=path)
                 if not new_attrs.exists:
-                        # Assume no updates needed.
-                        return []
+                        # No updates needed (not even to attrs), so return None.
+                        return None
+
+                old_attrs = self._attrs
+                if old_attrs.created != new_attrs.created:
+                        # It's very likely that the catalog has been recreated
+                        # or this is a completely different catalog than was
+                        # expected.  In either case, an update isn't possible.
+                        raise api_errors.BadCatalogUpdateIdentity(path)
+
+                if new_attrs.last_modified == old_attrs.last_modified:
+                        # No updates needed (not even to attrs), so return None.
+                        return None
 
                 # First, verify that all of the catalog parts the client has
                 # still exist.  If they no longer exist, the catalog is no
                 # longer valid and cannot be updated.
                 parts = {}
-                old_attrs = self._attrs
                 incremental = True
                 for name in old_attrs.parts:
                         if name not in new_attrs.parts:
@@ -2156,12 +2263,44 @@
                         if logname not in new_attrs.updates:
                                 incremental = False
 
-                        if locale not in parts:
-                                parts[locale] = set()
+                        parts.setdefault(locale, set())
                         parts[locale].add(name)
 
+                # XXX in future, add current locale to this.  For now, just
+                # ensure that all of the locales of parts that were changed
+                # and exist on-disk are included.
+                locales = set(("C",))
+                locales.update(set(parts.keys()))
+
+                # Now determine if there are any new parts for this locale that
+                # this version of the API knows how to use that the client
+                # doesn't already have.
+                for name in new_attrs.parts:
+                        if name in parts or name in old_attrs.parts:
+                                continue
+
+                        # The last component of the name is the locale.
+                        locale = name.split(".", 2)[2]
+                        if locale not in locales:
+                                continue
+
+                        # Currently, only these parts are used by the client,
+                        # so only they need to be retrieved.
+                        if name == self.__BASE_PART or \
+                            name == self.__DEPS_PART or \
+                            name.startswith(self.__SUMM_PART_PFX):
+                                incremental = False
+
+                                # If a new part has been added for the current
+                                # locale, then incremental updates can't be
+                                # performed since updates for this locale can
+                                # only be applied to parts that already exist.
+                                parts.setdefault(locale, set())
+                                parts[locale].add(name)
+
                 if not parts:
-                        # No updates needed.
+                        # No updates needed to catalog parts on-disk, but
+                        # catalog has changed.
                         return []
                 elif not incremental:
                         # Since an incremental update cannot be performed,
@@ -2180,6 +2319,9 @@
                         # needed for an incremental update.
                         last_lm = None
                         for name in parts[locale]:
+                                if name not in old_attrs.parts:
+                                        continue
+
                                 lm = old_attrs.parts[name]["last-modified"]
                                 if not last_lm or lm > last_lm:
                                         last_lm = lm
@@ -2214,7 +2356,7 @@
                 """Returns a set containing the names of all the packages in
                 the Catalog."""
 
-                base = self.get_part("catalog.base.C", must_exist=True)
+                base = self.get_part(self.__BASE_PART, must_exist=True)
                 if not base:
                         # Catalog contains nothing.
                         return set()
@@ -2237,6 +2379,16 @@
 
                 return self._attrs.parts
 
+        def publishers(self):
+                """Returns a set containing the prefixes of all the publishers
+                in the Catalog."""
+
+                base = self.get_part(self.__BASE_PART, must_exist=True)
+                if not base:
+                        # Catalog contains nothing.
+                        return set()
+                return set(p for p in base.publishers())
+
         def remove_package(self, pfmri):
                 """Remove a package and its metadata."""
 
@@ -2261,11 +2413,16 @@
                                 try:
                                         pkg_entry = part.get_entry(pfmri)
                                 except api_errors.UnknownCatalogEntry:
-                                        # Skip; part doesn't have this package.
+                                        if name == self.__BASE_PART:
+                                                # Entry should exist in at least
+                                                # the base part.
+                                                raise
+                                        # Skip; package's presence is optional
+                                        # in other parts.
                                         continue
 
                                 part.remove(pfmri, op_time=op_time)
-                                if self.__log_updates:
+                                if self.log_updates:
                                         entries[part.name] = pkg_entry
 
                         self.__log_update(pfmri, CatalogUpdate.REMOVE, op_time,
@@ -2306,6 +2463,20 @@
                                                 continue
                 return sigs
 
+        def tuples(self):
+                """A generator function that produces FMRI tuples as it iterates
+                over the contents of the catalog."""
+
+                base = self.get_part(self.__BASE_PART, must_exist=True)
+                if not base:
+                        # Catalog contains nothing.
+
+                        # This construction is necessary to get python to
+                        # return no results properly to callers expecting
+                        # a generator function.
+                        return iter(())
+                return base.tuples()
+
         @property
         def updates(self):
                 """A dict containing the list of known updates for the catalog
@@ -2324,9 +2495,9 @@
                 'metadata' must be a dict of additional metadata to store with
                 the package's BASE record."""
 
-                assert not self.__log_updates and not self.read_only
-
-                base = self.get_part("catalog.base.C", must_exist=True)
+                assert not self.log_updates and not self.read_only
+
+                base = self.get_part(self.__BASE_PART, must_exist=True)
                 if not base:
                         raise api_errors.UnknownCatalogEntry(pfmri.get_fmri())
 
@@ -2373,6 +2544,32 @@
         sign = property(__get_sign, __set_sign)
         version = property(__get_version, __set_version)
 
+
+# Methods used by external callers
+def verify(filename):
+        """Convert the catalog part named by filename into the correct
+        type of Catalog object and then call its validate method to ensure
+        that its contents are self-consistent."""
+
+        path, fn = os.path.split(filename)
+        catobj = None
+
+        if fn.startswith("catalog"):
+                if fn.endswith("attrs"):
+                        catobj = CatalogAttrs(meta_root=path)
+                else:
+                        catobj = CatalogPart(fn, meta_root=path)
+        elif fn.startswith("update"):
+                catobj = CatalogUpdate(fn, meta_root=path)
+        else:
+                # Unrecognized.
+                raise api_errors.UnrecognizedCatalogPart(fn)
+
+        # With the else case above, this should never be None.
+        assert catobj
+
+        catobj.validate()
+
 # Methods used by Catalog classes.
 def datetime_to_ts(dt):
         """Take datetime object dt, and convert it to a ts in ISO-8601
--- a/src/modules/client/__init__.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/__init__.py	Fri Oct 23 17:43:37 2009 -0500
@@ -23,16 +23,29 @@
 # Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
 # Use is subject to license terms.
 
+import logging
 import os
+import sys
 
 __all__ = ["global_settings"]
 
+class _LogFilter(logging.Filter):
+        def __init__(self, max_level=logging.CRITICAL):
+                logging.Filter.__init__(self)
+                self.max_level = max_level
+
+        def filter(self, record):
+                return record.levelno <= self.max_level
+
 class GlobalSettings(object):
         """ This class defines settings which are global
             to the client instance """
 
         def __init__(self):
                 object.__init__(self)
+                self.__info_log_handler = None
+                self.__error_log_handler = None
+                self.__verbose = False
                 self.client_name = None
                 self.pkg_client_max_timeout_default = 4
                 self.pkg_client_connect_timeout_default = 60
@@ -66,5 +79,91 @@
                 except ValueError:
                         self.PKG_CLIENT_LOWSPEED_TIMEOUT = \
                             self.pkg_client_lowspeed_timeout_default
+                self.reset_logging()
+
+        def __get_error_log_handler(self):
+                return self.__error_log_handler
+
+        def __get_info_log_handler(self):
+                return self.__info_log_handler
+
+        def __get_verbose(self):
+                return self.__verbose
+
+        def __set_error_log_handler(self, val):
+                logger = logging.getLogger("pkg")
+                if self.__error_log_handler:
+                        logger.removeHandler(self.__error_log_handler)
+                self.__error_log_handler = val
+                if val:
+                        logger.addHandler(val)
+
+        def __set_info_log_handler(self, val):
+                logger = logging.getLogger("pkg")
+                if self.__info_log_handler:
+                        logger.removeHandler(self.__info_log_handler)
+                self.__info_log_handler = val
+                if val:
+                        logger.addHandler(val)
+
+        def __set_verbose(self, val):
+                if self.__info_log_handler:
+                        if val:
+                                level = logging.DEBUG
+                        else:
+                                level = logging.INFO
+                        self.__info_log_handler.setLevel(level)
+                self.__verbose = val
+
+        @property
+        def logger(self):
+                return logging.getLogger("pkg")
+
+        def reset_logging(self):
+                """Resets client logging to its default state.  This will cause
+                all logging.INFO entries to go to sys.stdout, and all entries of
+                logging.WARNING or higher to go to sys.stderr."""
+
+                logger = logging.getLogger("pkg")
+                logger.setLevel(logging.DEBUG)
+
+                # Don't pass messages that are rejected to the root logger.
+                logger.propagate = 0
+
+                # By default, log all informational messages, but not warnings
+                # and above to stdout.
+                info_h = logging.StreamHandler(sys.stdout)
+
+                # Minimum logging level for informational messages.
+                if self.verbose:
+                        info_h.setLevel(logging.DEBUG)
+                else:
+                        info_h.setLevel(logging.INFO)
+
+                log_fmt = logging.Formatter()
+
+                # Enforce maximum logging level for informational messages.
+                info_f = _LogFilter(logging.INFO)
+                info_h.addFilter(info_f)
+                info_h.setFormatter(log_fmt)
+                logger.addHandler(info_h)
+
+                # By default, log all warnings and above to stderr.
+                error_h = logging.StreamHandler(sys.stderr)
+                error_h.setFormatter(log_fmt)
+                error_h.setLevel(logging.WARNING)
+                logger.addHandler(error_h)
+
+                # Stash the handles so they can be removed later.
+                self.info_log_handler = info_h
+                self.error_log_handler = error_h
+
+        error_log_handler = property(__get_error_log_handler,
+            __set_error_log_handler)
+
+        info_log_handler = property(__get_info_log_handler,
+            __set_info_log_handler)
+
+        verbose = property(__get_verbose, __set_verbose)
 
 global_settings = GlobalSettings()
--- a/src/modules/client/api.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/api.py	Fri Oct 23 17:43:37 2009 -0500
@@ -52,6 +52,8 @@
 CURRENT_API_VERSION = 22
 CURRENT_P5I_VERSION = 1
 
+logger = global_settings.logger
+
 class ImageInterface(object):
         """This class presents an interface to images that clients may use.
         There is a specific order of methods which must be used to install
@@ -162,7 +164,7 @@
                 try:
                         self.__img.check_cert_validity()
                 except api_errors.ExpiringCertificate, e:
-                        misc.emsg(e)
+                        logger.error(e)
                 except:
                         exc_type, exc_value, exc_traceback = sys.exc_info()
                         if exc_type in log_op_end:
@@ -844,6 +846,60 @@
                                 else:
                                         fmris.append(npmatch[0])
 
+                if local:
+                        img_cat = self.__img.get_catalog(
+                            self.__img.IMG_CATALOG_INSTALLED)
+                else:
+                        img_cat = self.__img.get_catalog(
+                            self.__img.IMG_CATALOG_KNOWN)
+                excludes = self.__img.list_excludes()
+
+                # Set of summary-related options that are in catalog data.
+                summ_opts = frozenset([PackageInfo.SUMMARY,
+                    PackageInfo.CATEGORIES, PackageInfo.DESCRIPTION])
+
+                # Set of all options that are in catalog data.
+                cat_opts = summ_opts | frozenset([PackageInfo.DEPENDENCIES])
+
+                # Set of options that require manifest retrieval.
+                act_opts = PackageInfo.ACTION_OPTIONS - \
+                    frozenset([PackageInfo.DEPENDENCIES])
+
+                def get_pkg_cat_data(f):
+                        # XXX this doesn't handle locale.
+                        get_summ = summ = desc = cat_info = deps = None
+                        cat_data = []
+                        if summ_opts & info_needed:
+                                cat_data.append(img_cat.SUMMARY)
+                                get_summ = PackageInfo.SUMMARY in info_needed
+                        if PackageInfo.CATEGORIES in info_needed:
+                                cat_info = []
+                        if PackageInfo.DEPENDENCIES in info_needed:
+                                cat_data.append(img_cat.DEPENDENCY)
+                                deps = []
+
+                        for a in img_cat.get_entry_actions(f, cat_data,
+                            excludes=excludes):
+                                if a.name == "depend":
+                                        deps.append(a.attrs.get(a.key_attr))
+                                elif a.attrs["name"] == "pkg.summary":
+                                        if get_summ:
+                                                summ = a.attrs["value"]
+                                elif a.attrs["name"] in ("description",
+                                    "pkg.description"):
+                                        desc = a.attrs["value"]
+                                elif cat_info != None and a.has_category_info():
+                                        cat_info.extend(
+                                            PackageCategory(scheme, cat)
+                                            for scheme, cat
+                                            in a.parse_category_info())
+
+                        if get_summ and summ == None:
+                                summ = desc
+                        if not PackageInfo.DESCRIPTION in info_needed:
+                                desc = None
+                        return summ, desc, cat_info, deps
+
                 pis = []
                 for f in fmris:
                         pub = name = version = release = None
@@ -869,24 +925,23 @@
                         summary = size = licenses = cat_info = description = \
                             None
 
-                        if (frozenset([PackageInfo.SIZE, PackageInfo.LICENSES,
-                            PackageInfo.SUMMARY, PackageInfo.CATEGORIES,
-                            PackageInfo.DESCRIPTION]) |
-                            PackageInfo.ACTION_OPTIONS) & info_needed:
+                        if frozenset([PackageInfo.SUMMARY,
+                            PackageInfo.CATEGORIES,
+                            PackageInfo.DESCRIPTION,
+                            PackageInfo.DEPENDENCIES]) & info_needed:
+                                summary, description, cat_info, dependencies = \
+                                    get_pkg_cat_data(f)
+
+                        if (frozenset([PackageInfo.SIZE,
+                            PackageInfo.LICENSES]) | act_opts) & info_needed:
                                 mfst = self.__img.get_manifest(f)
-                                excludes = self.__img.list_excludes()
+                                if PackageInfo.LICENSES in info_needed:
+                                        licenses = self.__licenses(mfst, local)
+
                                 if PackageInfo.SIZE in info_needed:
                                         size = mfst.get_size(excludes=excludes)
-                                if PackageInfo.LICENSES in info_needed:
-                                        licenses = self.__licenses(mfst, local)
-                                if PackageInfo.SUMMARY in info_needed:
-                                        summary = mfst.get("pkg.summary",
-                                            mfst.get("description", ""))
-                                if PackageInfo.DESCRIPTION in info_needed:
-                                        description = \
-                                            mfst.get("pkg.description", "")
 
-                                if PackageInfo.ACTION_OPTIONS & info_needed:
+                                if act_opts & info_needed:
                                         if PackageInfo.LINKS in info_needed:
                                                 links = list(
                                                     mfst.gen_key_attribute_value_by_type(
@@ -903,21 +958,6 @@
                                                 dirs = list(
                                                     mfst.gen_key_attribute_value_by_type(
                                                     "dir", excludes))
-                                        if PackageInfo.DEPENDENCIES in \
-                                            info_needed:
-                                                dependencies = list(
-                                                    mfst.gen_key_attribute_value_by_type(
-                                                    "depend", excludes))
-
-                                if PackageInfo.CATEGORIES in info_needed:
-                                        cat_info = [
-                                            PackageCategory(scheme, cat)
-                                            for ca
-                                            in mfst.gen_actions_by_type("set")
-                                            if ca.has_category_info()
-                                            for scheme, cat
-                                            in ca.parse_category_info()
-                                        ]
 
                         pis.append(PackageInfo(pkg_stem=name, summary=summary,
                             category_info_list=cat_info, state=state,
--- a/src/modules/client/api_errors.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/api_errors.py	Fri Oct 23 17:43:37 2009 -0500
@@ -395,6 +395,15 @@
                 return _("Unknown catalog update type '%s'") % self.data
 
 
+class UnrecognizedCatalogPart(CatalogError):
+        """Raised when the catalog finds a CatalogPart that is unrecognized
+        or invalid."""
+
+        def __str__(self):
+                return _("Unrecognized, unknown, or invalid CatalogPart '%s'") \
+                    % self.data
+
+
 class InventoryException(ApiException):
         """Used to indicate that some of the specified patterns to a catalog
         matching function did not match any catalog entries."""
@@ -1019,6 +1028,20 @@
         def __str__(self):
                 return _("Unknown repository mirror '%s'.") % self.data
 
+class UnsupportedRepositoryOperation(PublisherError):
+        """The publisher has no active repositories that support the
+        requested operation."""
+
+        def __init__(self, pub, operation):
+                ApiException.__init__(self)
+                self.data = None
+                self.kwargs = None
+                self.pub = pub
+                self.op = operation
+
+        def __str__(self):
+                return _("Publisher '%s' has no repositories that support the"
+                    " '%s' operation.") % (self.pub.prefix, self.op)
 
 class UnknownRepositoryOrigin(PublisherError):
         """Used to indicate that a repository URI could not be found in the
--- a/src/modules/client/bootenv.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/bootenv.py	Fri Oct 23 17:43:37 2009 -0500
@@ -26,9 +26,11 @@
 import os
 import tempfile
 
+from pkg.client import global_settings
+logger = global_settings.logger
+
 import pkg.client.api_errors as api_errors
 import pkg.pkgsubprocess as subprocess
-from pkg.misc import msg, emsg
 
 # Since pkg(1) may be installed without libbe installed
 # check for libbe and import it if it exists.
@@ -129,7 +131,7 @@
                         if err == 0:
                                 self.snapshot_name = snapshot_name
                         else:
-                                emsg(_("pkg: unable to create an auto "
+                                logger.error(_("pkg: unable to create an auto "
                                     "snapshot. pkg recovery is disabled."))
                                 raise RuntimeError, "recoveryDisabled"
                         self.is_valid = True
@@ -262,14 +264,14 @@
                                     stdout = file("/dev/null"),
                                     stderr = subprocess.STDOUT)
                         except OSError, e:
-                                emsg(_("pkg: A system error %(e)s was caught "
-                                    "executing %(cmd)s") %
-                                    { "e": e, "cmd": " ".join(cmd) })
+                                logger.error(_("pkg: A system error %(e)s was "
+                                    "caught executing %(cmd)s") % { "e": e,
+                                    "cmd": " ".join(cmd) })
 
                         if ret != 0:
-                                emsg(_("pkg: '%(cmd)s' failed. \nwith a "
-                                    "return code of %(ret)d.") %
-                                    { "cmd": " ".join(cmd), "ret": ret })
+                                logger.error(_("pkg: '%(cmd)s' failed. \nwith "
+                                    "a return code of %(ret)d.") % {
+                                    "cmd": " ".join(cmd), "ret": ret })
                                 return
 
                 def activate_live_be(cmd):
@@ -279,7 +281,7 @@
                         exec_cmd(cmd)
 
                         if be.beActivate(self.be_name_clone) != 0:
-                                emsg(_("pkg: unable to activate %s") \
+                                logger.error(_("pkg: unable to activate %s") \
                                     % self.be_name_clone)
                                 return
 
@@ -289,13 +291,13 @@
                         self.img.history.log_operation_end()
 
                         if be.beUnmount(self.be_name_clone) != 0:
-                                emsg(_("pkg: unable to unmount %s") \
+                                logger.error(_("pkg: unable to unmount %s") \
                                     % self.clone_dir)
                                 return
 
                         os.rmdir(self.clone_dir)
 
-                        msg(_("""
+                        logger.info(_("""
 A clone of %s exists and has been updated and activated.
 On the next boot the Boot Environment %s will be mounted on '/'.
 Reboot when ready to switch to this updated BE.
@@ -309,7 +311,7 @@
                         cmd += [self.root]
                         exec_cmd(cmd)
 
-                        msg(_("%s has been updated successfully") % \
+                        logger.info(_("%s has been updated successfully") % \
                                 (self.be_name))
 
                         os.rmdir(self.clone_dir)
@@ -341,18 +343,20 @@
                 # Leave the clone around for debugging purposes if we're
                 # operating on the live BE.
                 if self.is_live_BE:
-                        emsg(_(" The running system has not been modified. "
-                            "Modifications were only made to a clone of the "
-                            "running system.  This clone is mounted at %s "
-                            "should you wish to inspect it.") % self.clone_dir)
+                        logger.error(_(" The running system has not been "
+                            "modified. Modifications were only made to a clone "
+                            "of the running system.  This clone is mounted at "
+                            "%s should you wish to inspect it.") % \
+                            self.clone_dir)
 
                 else:
                         # Rollback and destroy the snapshot.
                         try:
                                 if be.beRollback(self.be_name,
                                     self.snapshot_name) != 0:
-                                        emsg(_("pkg: unable to rollback BE %s "
-                                            "and restore image") % self.be_name)
+                                        logger.error(_("pkg: unable to "
+                                            "rollback BE %s and restore "
+                                            "image") % self.be_name)
 
                                 self.destroy_snapshot()
                                 os.rmdir(self.clone_dir)
@@ -360,8 +364,9 @@
                                 self.img.history.log_operation_error(error=e)
                                 raise e
 
-                        msg(_("%s failed to be updated. No changes have been "
-                            "made to %s.") % (self.be_name, self.be_name))
+                        logger.error(_("%s failed to be updated. No changes "
+                            "have been made to %s.") % (self.be_name,
+                            self.be_name))
 
         def destroy_snapshot(self):
 
@@ -372,8 +377,8 @@
                         BootEnv to manage multiple snapshots."""
 
                 if be.beDestroySnapshot(self.be_name, self.snapshot_name) != 0:
-                        emsg(_("pkg: unable to destroy snapshot %s") % \
-                            self.snapshot_name)
+                        logger.error(_("pkg: unable to destroy snapshot "
+                            "%s") % self.snapshot_name)
 
         def restore_install_uninstall(self):
 
@@ -401,37 +406,36 @@
                                     be.beCopy(self.be_name_clone, \
                                     self.be_name, self.snapshot_name)
                                 if ret != 0:
-                                        emsg(_("pkg: unable to create BE %s") \
-                                           % self.be_name_clone)
+                                        logger.error(_("pkg: unable to create "
+                                            "BE %s") % self.be_name_clone)
                                         return
 
                         if be.beMount(self.be_name_clone, self.clone_dir) != 0:
-                                emsg(_("pkg: unable to mount BE %(name)s "
-                                    "on %(clone_dir)s") %
-                                    { "name": self.be_name_clone,
-                                      "clone_dir": self.clone_dir })
+                                logger.error(_("pkg: unable to mount BE "
+                                    "%(name)s on %(clone_dir)s") % {
+                                    "name": self.be_name_clone,
+                                    "clone_dir": self.clone_dir })
                                 return
 
-                        emsg(_("The Boot Environment %(name)s failed to be "
-                            "updated. A snapshot was taken before the failed "
-                            "attempt and is mounted here %(clone_dir)s. Use "
-                            "'beadm unmount %(clone_name)s' and then 'beadm "
-                            "activate %(clone_name)s' if you wish to boot "
-                            "to this BE.") %
-                            { "name": self.be_name,
-                              "clone_dir": self.clone_dir,
-                              "clone_name": self.be_name_clone })
+                        logger.error(_("The Boot Environment %(name)s failed "
+                            "to be updated. A snapshot was taken before the "
+                            "failed attempt and is mounted here %(clone_dir)s. "
+                            "Use 'beadm unmount %(clone_name)s' and then "
+                            "'beadm activate %(clone_name)s' if you wish to "
+                            "boot to this BE.") % { "name": self.be_name,
+                            "clone_dir": self.clone_dir,
+                            "clone_name": self.be_name_clone })
                 else:
                         if be.beRollback(self.be_name, self.snapshot_name) != 0:
-                                emsg("pkg: unable to rollback BE %s" % \
-                                    self.be_name)
+                                logger.error("pkg: unable to rollback BE "
+                                    "%s" % self.be_name)
 
                         self.destroy_snapshot()
 
-                        emsg(_("The Boot Environment %s failed to be updated. "
-                          "A snapshot was taken before the failed attempt "
-                          "and has been restored so no changes have been "
-                          "made to %s.") % (self.be_name, self.be_name))
+                        logger.error(_("The Boot Environment %s failed to be "
+                            "updated. A snapshot was taken before the failed "
+                            "attempt and has been restored so no changes have "
+                            "been made to %s.") % (self.be_name, self.be_name))
 
         def activate_install_uninstall(self):
                 """Activate an install/uninstall attempt. Which just means
--- a/src/modules/client/image.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/image.py	Fri Oct 23 17:43:37 2009 -0500
@@ -29,11 +29,13 @@
 import os
 import platform
 import shutil
-import stat
 import tempfile
 import time
 import urllib
 
+from pkg.client import global_settings
+logger = global_settings.logger
+
 import pkg.Uuid25
 import pkg.catalog
 import pkg.client.api_errors            as api_errors
@@ -56,9 +58,7 @@
 
 from pkg.client.debugvalues import DebugValues
 from pkg.client.imagetypes import IMG_USER, IMG_ENTIRE
-from pkg.misc import CfgCacheError
-from pkg.misc import EmptyI, EmptyDict
-from pkg.misc import msg, emsg
+from pkg.misc import CfgCacheError, EmptyI, EmptyDict
 
 img_user_prefix = ".org.opensolaris,pkg"
 img_root_prefix = "var/pkg"
@@ -217,7 +217,8 @@
                                 raise api_errors.ImageAlreadyExists(self.root)
                         if not force and os.path.exists(self.root) and \
                             len(os.listdir(self.root)) > 0:
-                                raise api_errors.CreatingImageInNonEmptyDir(self.root)
+                                raise api_errors.CreatingImageInNonEmptyDir(
+                                    self.root)
                         self.__set_dirs(root=self.root, imgtype=imgtype,
                             progtrack=progtrack)
 
@@ -239,6 +240,13 @@
                                 return False
                 return True
 
+        def __catalog_loaded(self, name):
+                """Returns a boolean value indicating whether the named catalog
+                has already been loaded.  This is intended to be used as an
+                optimization function to determine which catalog to request."""
+
+                return name in self.__catalogs
+
         def image_type(self, d):
                 """Returns the type of image at directory: d; or None"""
                 rv = None
@@ -341,6 +349,9 @@
                                 if e.errno == errno.EACCES:
                                         raise api_errors.PermissionsException(
                                             e.filename)
+                                if e.errno == errno.EROFS:
+                                        raise api_errors.ReadOnlyFileSystemException(
+                                            e.filename)
                                 raise
 
         def __set_dirs(self, imgtype, root, progtrack=None):
@@ -414,16 +425,13 @@
 
                 # Once its structure is valid, then ensure state information
                 # is intact.
-                rebuild = False
                 kdir = os.path.join(self.imgdir, "state",
                     self.IMG_CATALOG_KNOWN)
                 kcattrs = os.path.join(kdir, "catalog.attrs")
                 idir = os.path.join(self.imgdir, "state",
                     self.IMG_CATALOG_INSTALLED)
                 icattrs = os.path.join(idir, "catalog.attrs")
-                if not os.path.isfile(icattrs):
-                        rebuild = True
-                elif not os.path.isfile(kcattrs) and os.path.isfile(icattrs):
+                if not os.path.isfile(kcattrs) and os.path.isfile(icattrs):
                         # If the known catalog doesn't exist, but the installed
                         # catalog does, then copy the installed catalog to the
                         # known catalog directory so that state information can
@@ -431,9 +439,6 @@
                         for fname in os.listdir(idir):
                                 portable.copyfile(os.path.join(idir, fname),
                                     os.path.join(kdir, fname))
-                        rebuild = True
-
-                if rebuild:
                         self.__rebuild_image_catalogs(progtrack=progtrack)
 
         def set_attrs(self, is_zone, prefix, pub_url,
@@ -486,7 +491,8 @@
                 self.history.log_operation_end()
 
         def is_liveroot(self):
-                return bool(self.root == "/" or DebugValues.get_value("simulate_live_root"))
+                return bool(self.root == "/" or
+                    DebugValues.get_value("simulate_live_root"))
 
         def is_zone(self):
                 return self.cfg_cache.variants[
@@ -698,8 +704,8 @@
 
         def __call_imageplan_evaluate(self, ip, verbose=False):
                 if verbose:
-                        msg(_("Before evaluation:"))
-                        msg(ip)
+                        logger.info(_("Before evaluation:"))
+                        logger.info(ip)
 
                 # A plan can be requested without actually performing an
                 # operation on the image.
@@ -719,7 +725,7 @@
                             ip.get_plan(full=False)
 
                 if verbose:
-                        msg(_("After evaluation:"))
+                        logger.info(_("After evaluation:"))
                         ip.display()
 
         def image_change_variant(self, variants, progtrack, check_cancelation,
@@ -736,7 +742,7 @@
 
                 if not variants:
                         self.__call_imageplan_evaluate(ip, verbose)
-                        msg("No variant changes.")
+                        logger.info("No variant changes.")
                         return
 
                 #
@@ -789,7 +795,7 @@
                 # api::__check_cancelation() function.
                 pps = []
                 for fmri, actions in repairs:
-                        msg("Repairing: %-50s" % fmri.get_pkg_stem())
+                        logger.info("Repairing: %-50s" % fmri.get_pkg_stem())
                         m = self.get_manifest(fmri)
                         pp = pkgplan.PkgPlan(self, progtrack, lambda: False)
                         pp.propose_repair(fmri, m, actions)
@@ -1036,15 +1042,22 @@
                 try:
                         os.makedirs(croot)
                 except EnvironmentError, e:
-                        if e.errno == errno.EACCES:
+                        if e.errno in (errno.EACCES, errno.EROFS):
                                 # Allow operations to work for
                                 # unprivileged users.
                                 croot = None
                         elif e.errno != errno.EEXIST:
                                 raise
 
-                def manifest_cb(f):
-                        return self.get_manifest(f, all_arch=True)
+                def manifest_cb(cat, f):
+                        # Only allow lazy-load for packages from v0 sources.
+                        # Assume entries for other sources have all data
+                        # required in catalog.
+                        entry = cat.get_entry(f)
+                        states = entry["metadata"]["states"]
+                        if self.PKG_STATE_V0 in states:
+                                return self.get_manifest(f, all_arch=True)
+                        return None
 
                 # batch_mode is set to True here as any operations that modify
                 # the catalogs (add or remove entries) are only done during an
@@ -1113,12 +1126,21 @@
                 """Returns a boolean value indicating whether the specified
                 package is installed."""
 
-                cat = self.get_catalog(self.IMG_CATALOG_INSTALLED)
+                # Avoid loading the installed catalog if the known catalog
+                # is already loaded.  This is safe since the installed
+                # catalog is a subset of the known, and a specific entry
+                # is being retrieved.
+                if not self.__catalog_loaded(self.IMG_CATALOG_KNOWN):
+                        cat = self.get_catalog(self.IMG_CATALOG_INSTALLED)
+                else:
+                        cat = self.get_catalog(self.IMG_CATALOG_KNOWN)
+
                 try:
-                        cat.get_entry(pfmri)
+                        entry = cat.get_entry(pfmri)
                 except api_errors.UnknownCatalogEntry:
                         return False
-                return True
+                states = entry["metadata"]["states"]
+                return self.PKG_STATE_INSTALLED in states
 
         def is_pkg_preferred(self, pfmri):
                 """Compatibility function for use by pkg.client.api only.
@@ -1132,7 +1154,15 @@
                 if pfmri.publisher == self.get_preferred_publisher():
                         return True
 
-                cat = self.get_catalog(self.IMG_CATALOG_INSTALLED)
+                # Avoid loading the installed catalog if the known catalog
+                # is already loaded.  This is safe since the installed
+                # catalog is a subset of the known, and a specific entry
+                # is being retrieved.
+                if not self.__catalog_loaded(self.IMG_CATALOG_KNOWN):
+                        cat = self.get_catalog(self.IMG_CATALOG_INSTALLED)
+                else:
+                        cat = self.get_catalog(self.IMG_CATALOG_KNOWN)
+
                 try:
                         entry = cat.get_entry(pfmri)
                 except api_errors.UnknownCatalogEntry:
@@ -1249,7 +1279,7 @@
                 # state.
 
                 # All enabled publisher catalogs must be processed.
-                pub_cats = [pub.catalog for pub in publist]
+                pub_cats = [(pub.prefix, pub.catalog) for pub in publist]
 
                 # XXX For backwards compatibility, 'upgradability' of packages
                 # is calculated and stored based on whether a given pkg stem
@@ -1259,7 +1289,7 @@
                 # as it used to be.  In the future, it could likely be improved
                 # by usage of the SAT solver.
                 newest = {}
-                for cat in [old_kcat] + pub_cats:
+                for pfx, cat in [(None, old_kcat)] + pub_cats:
                         for f in cat.fmris():
                                 nver = newest.get(f.pkg_name, None)
                                 newest[f.pkg_name] = max(nver, f.version)
@@ -1299,8 +1329,9 @@
                         mdata["states"] = list(states)
                         return True, mdata
 
-                for cat in pub_cats:
-                        kcat.append(cat, cb=pub_append_cb)
+                for pfx, cat in pub_cats:
+                        kcat.append(cat, cb=pub_append_cb, pubs=[pfx])
+                pub_cats = None
 
                 # Next, add any remaining entries in the previous 'known'
                 # catalog to the new one, but clear PKG_STATE_KNOWN and
@@ -1402,8 +1433,7 @@
                 try:
                         self.check_cert_validity()
                 except api_errors.ExpiringCertificate, e:
-                        # XXX need client messaging framework
-                        misc.emsg(e)
+                        logger.error(str(e))
 
                 pubs_to_refresh = []
 
@@ -1710,12 +1740,11 @@
                         self.__catalogs[self.IMG_CATALOG_KNOWN] = kcat
                         self.__catalogs[self.IMG_CATALOG_INSTALLED] = icat
 
-                        # XXX This awaits a proper messaging framework.
                         # Raising an exception here would be a decidedly
                         # bad thing as it would disrupt find_root, etc.
-                        emsg("Package operation performance is currently "
-                            "degraded.\nThis can be resolved by executing "
-                            "'pkg refresh' as a privileged user.\n")
+                        logger.warning("Package operation performance is "
+                            "currently degraded.\nThis can be resolved by "
+                            "executing 'pkg refresh' as a privileged user.\n")
                         self.history.log_operation_end()
                         return
 
@@ -2126,7 +2155,7 @@
 
                 if not self.is_user_cache_dir and \
                     self.cfg_cache.get_policy(imageconfig.FLUSH_CONTENT_CACHE):
-                        msg("Deleting content cache")
+                        logger.info("Deleting content cache")
                         shutil.rmtree(self.dl_cache_dir, True)
 
         def __salvagedir(self, path):
@@ -2147,9 +2176,8 @@
                 better error passback mechanism. Path is rooted in /...."""
 
                 sdir = self.__salvagedir(path)
-                # XXX need a better way to do this.
-                emsg("\nWarning - directory %s not empty - contents preserved "
-                        "in %s" % (path, sdir))
+                logger.warning("\nWarning - directory %s not empty - contents "
+                    "preserved in %s" % (path, sdir))
 
         def temporary_dir(self):
                 """create a temp directory under image directory for various
@@ -2163,6 +2191,9 @@
                         if e.errno == errno.EACCES:
                                 raise api_errors.PermissionsException(
                                     e.filename)
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
                         raise
 
         def temporary_file(self):
@@ -2178,6 +2209,9 @@
                         if e.errno == errno.EACCES:
                                 raise api_errors.PermissionsException(
                                     e.filename)
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
                         raise
                 return name
 
--- a/src/modules/client/imageconfig.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/imageconfig.py	Fri Oct 23 17:43:37 2009 -0500
@@ -29,14 +29,15 @@
 import platform
 import re
 
+from pkg.client import global_settings
+logger = global_settings.logger
+
 import pkg.client.api_errors as api_errors
 import pkg.client.publisher as publisher
 import pkg.fmri as fmri
 import pkg.portable as portable
 import pkg.variant as variant
 
-from pkg.misc import emsg
-
 # The default_policies dictionary defines the policies that are supported by 
 # pkg(5) and their default values. Calls to the ImageConfig.get_policy method
 # should use the constants defined here.
@@ -319,6 +320,9 @@
                                 if e.errno == errno.EACCES:
                                         raise api_errors.PermissionsException(
                                             e.filename)
+                                if e.errno == errno.EROFS:
+                                        raise api_errors.ReadOnlyFileSystemException(
+                                            e.filename)
                                 raise
                         acp.write(f)
 
@@ -463,9 +467,8 @@
                         else:
                                 ssl_key = os.path.abspath(ssl_key)
                         if not os.path.exists(ssl_key):
-                                # XXX need client messaging framework
-                                emsg(api_errors.NoSuchKey(ssl_key, uri=origin,
-                                    publisher=prefix))
+                                logger.error(api_errors.NoSuchKey(ssl_key,
+                                    uri=origin, publisher=prefix))
                                 ssl_key = None
 
                 if ssl_cert:
@@ -475,9 +478,8 @@
                         else:
                                 ssl_cert = os.path.abspath(ssl_cert)
                         if not os.path.exists(ssl_cert):
-                                # XXX need client messaging framework
-                                emsg(api_errors.NoSuchCertificate(ssl_cert,
-                                    uri=origin, publisher=prefix))
+                                logger.error(api_errors.NoSuchCertificate(
+                                    ssl_cert, uri=origin, publisher=prefix))
                                 ssl_cert = None
 
                 r = publisher.Repository(**repo_data)
--- a/src/modules/client/imageplan.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/imageplan.py	Fri Oct 23 17:43:37 2009 -0500
@@ -29,6 +29,9 @@
 import errno
 import traceback
 
+from pkg.client import global_settings
+logger = global_settings.logger
+
 import pkg.actions
 import pkg.client.actuator as actuator
 import pkg.client.api_errors as api_errors
@@ -39,7 +42,6 @@
 import pkg.search_errors as se
 
 from pkg.client.filter import compile_filter
-from pkg.misc import msg
 
 UNEVALUATED       = 0 # nothing done yet
 EVALUATED_PKGS    = 1 # established fmri changes
@@ -143,8 +145,8 @@
 
         def display(self):
                 for pp in self.pkg_plans:
-                        msg("%s -> %s" % (pp.origin_fmri, pp.destination_fmri))
-                msg("Actuators:\n%s" % self.actuators)
+                        logger.info("%s -> %s" % (pp.origin_fmri, pp.destination_fmri))
+                logger.info("Actuators:\n%s" % self.actuators)
 
         def is_proposed_fmri(self, pfmri):
                 for pf in self.target_fmris:
@@ -282,7 +284,6 @@
 
         def evaluate_fmri(self, pfmri):
                 self.progtrack.evaluate_progress(pfmri)
-                self.image.state.set_target(pfmri, self.__intent)
 
                 if self.check_cancelation():
                         raise api_errors.CanceledException()
@@ -363,8 +364,6 @@
                         self.propose_fmri(cf)
                         self.evaluate_fmri(cf)
 
-                self.image.state.set_target()
-
         def add_pkg_plan(self, pfmri):
                 """add a pkg plan to imageplan for fully evaluated frmi"""
                 m = self.image.get_manifest(pfmri)
@@ -380,7 +379,7 @@
                         try:
                                 pp.propose_destination(pfmri, m)
                         except RuntimeError:
-                                msg("pkg: %s already installed" % pfmri)
+                                logger.info("pkg: %s already installed" % pfmri)
                                 return
 
                 pp.evaluate(self.old_excludes, self.new_excludes)
@@ -419,7 +418,7 @@
                         pp.propose_removal(pfmri, m)
                 except RuntimeError:
                         self.image.state.set_target()
-                        msg("pkg %s not installed" % pfmri)
+                        logger.info("pkg %s not installed" % pfmri)
                         return
 
                 pp.evaluate([], self.old_excludes)
@@ -662,6 +661,9 @@
                                 if e.errno == errno.EACCES:
                                         raise api_errors.PermissionsException(
                                             e.filename)
+                                if e.errno == errno.EROFS:
+                                        raise api_errors.ReadOnlyFileSystemException(
+                                            e.filename)
                                 raise
 
                         self.progtrack.download_done()
--- a/src/modules/client/pkgplan.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/pkgplan.py	Fri Oct 23 17:43:37 2009 -0500
@@ -27,13 +27,13 @@
 import itertools
 import os
 
+from pkg.client import global_settings
+logger = global_settings.logger
+
 import pkg.actions.directory as directory
 import pkg.manifest as manifest
 import pkg.client.api_errors as api_errors
-from pkg.misc import msg
-from pkg.misc import get_pkg_otw_size
-from pkg.misc import EmptyI
-from pkg.misc import expanddirs
+from pkg.misc import expanddirs, get_pkg_otw_size, EmptyI
 
 class PkgPlan(object):
         """A package plan takes two package FMRIs and an Image, and produces the
@@ -270,8 +270,8 @@
                 try:
                         dest.install(self, src)
                 except Exception, e:
-                        msg("Action install failed for '%s' (%s):\n  %s: %s" % \
-                            (dest.attrs.get(dest.key_attr, id(dest)),
+                        logger.error("Action install failed for '%s' (%s):\n  "
+                            "%s: %s" % (dest.attrs.get(dest.key_attr, id(dest)),
                              self.destination_fmri.get_pkg_stem(),
                              e.__class__.__name__, e))
                         raise
@@ -281,8 +281,8 @@
                 try:
                         dest.install(self, src)
                 except Exception, e:
-                        msg("Action upgrade failed for '%s' (%s):\n %s: %s" % \
-                             (dest.attrs.get(dest.key_attr, id(dest)),
+                        logger.error("Action upgrade failed for '%s' (%s):\n "
+                            "%s: %s" % (dest.attrs.get(dest.key_attr, id(dest)),
                              self.destination_fmri.get_pkg_stem(),
                              e.__class__.__name__, e))
                         raise
@@ -292,8 +292,8 @@
                 try:
                         src.remove(self)
                 except Exception, e:
-                        msg("Action removal failed for '%s' (%s):\n  %s: %s" % \
-                            (src.attrs.get(src.key_attr, id(src)),
+                        logger.error("Action removal failed for '%s' (%s):\n "
+                            "%s: %s" % (src.attrs.get(src.key_attr, id(src)),
                              self.origin_fmri.get_pkg_stem(),
                              e.__class__.__name__, e))
                         raise
--- a/src/modules/client/progress.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/progress.py	Fri Oct 23 17:43:37 2009 -0500
@@ -29,7 +29,11 @@
 import sys
 import os
 import time
-from pkg.misc import PipeError, emsg
+
+from pkg.client import global_settings
+logger = global_settings.logger
+
+from pkg.misc import PipeError
 import pkg.portable as portable
 
 class ProgressTracker(object):
@@ -180,14 +184,17 @@
                         self.dl_output_done()
 
                 if self.dl_cur_npkgs != self.dl_goal_npkgs:
-                        emsg("\nExpected %s pkgs, received %s pkgs instead." %
-                            (self.dl_goal_npkgs, self.dl_cur_npkgs))
+                        logger.error("\nExpected %s pkgs, received %s pkgs "
+                            "instead." % (self.dl_goal_npkgs,
+                            self.dl_cur_npkgs))
                 if self.dl_cur_nfiles != self.dl_goal_nfiles:
-                        emsg("\nExpected %s files, received %s files instead."
-                            % (self.dl_goal_nfiles, self.dl_cur_nfiles))
+                        logger.error("\nExpected %s files, received %s files "
+                            "instead." % (self.dl_goal_nfiles,
+                            self.dl_cur_nfiles))
                 if self.dl_cur_nbytes != self.dl_goal_nbytes:
-                        emsg("\nExpected %s bytes, received %s bytes instead."
-                            % (self.dl_goal_nbytes, self.dl_cur_nbytes))
+                        logger.error("\nExpected %s bytes, received %s bytes "
+                            "instead." % (self.dl_goal_nbytes,
+                            self.dl_cur_nbytes))
 
                 assert self.dl_cur_npkgs == self.dl_goal_npkgs
                 assert self.dl_cur_nfiles == self.dl_goal_nfiles
--- a/src/modules/client/publisher.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/publisher.py	Fri Oct 23 17:43:37 2009 -0500
@@ -39,14 +39,20 @@
 import datetime as dt
 import errno
 import os
+import shutil
+import tempfile
+import time
+import urlparse
+
+from pkg.client import global_settings
+logger = global_settings.logger
+
 import pkg.catalog
 import pkg.client.api_errors as api_errors
 import pkg.misc as misc
 import pkg.portable as portable
+import pkg.server.catalog as old_catalog
 import pkg.Uuid25
-import shutil
-import time
-import urlparse
 
 # The "core" type indicates that a repository contains all of the dependencies
 # declared by packages in the repository.  It is primarily used for operating
@@ -918,6 +924,56 @@
         def __str__(self):
                 return self.prefix
 
+        def __validate_metadata(self):
+                """Private helper function to check the publisher's metadata
+                for configuration or other issues and log appropriate warnings
+                or errors.  Currently only checks catalog metadata."""
+
+                c = self.catalog
+                if not c.exists:
+                        # Nothing to validate.
+                        return
+                if not c.version > 0:
+                        # Validation doesn't apply.
+                        return
+                if not c.package_count:
+                        # Nothing to do.
+                        return
+
+                # XXX For now, perform this check using the catalog data.
+                # In the future, it should be done using the output of the
+                # publisher/0 operation.
+                pubs = self.catalog.publishers()
+
+                if self.prefix not in pubs:
+                        origin = self.selected_repository.origins[0]
+                        logger.warning(_("\nThe catalog retrieved for "
+                            "publisher '%(prefix)s' only contains package data "
+                            "for these publisher(s): %(pubs)s.  To resolve "
+                            "this issue, update this publisher to use the "
+                            "correct repository origin, or add one of the "
+                            "listed publishers using this publisher's "
+                            "repository origin."
+                            "\n\n"
+                            "To correct the repository origin, execute the "
+                            "following command as a privileged user:"
+                            "\n\n"
+                            "pkg set-publisher -O <url> %(prefix)s"
+                            "\n\n"
+                            "To add a new publisher using this publisher's "
+                            "repository origin, execute the following command "
+                            "as a privileged user:"
+                            "\n\n"
+                            "pkg set-publisher -O %(origin)s <publisher>"
+                            "\n\n"
+                            "After the new publisher has been added, this one "
+                            "should be removed by executing the following "
+                            "command as a privileged user:"
+                            "\n\n"
+                            "pkg unset-publisher %(prefix)s\n") % {
+                            "origin": origin, "prefix": self.prefix,
+                            "pubs": ", ".join(pubs) })
+
         def add_repository(self, repository):
                 """Adds the provided repository object to the publisher and
                 sets it as the selected one if no repositories exist."""
@@ -979,6 +1035,9 @@
                                 if e.errno == errno.EACCES:
                                         raise api_errors.PermissionsException(
                                             e.filename)
+                                if e.errno == errno.EROFS:
+                                        raise api_errors.ReadOnlyFileSystemException(
+                                            e.filename)
                                 elif e.errno != errno.EEXIST:
                                         # If the path already exists, move on.
                                         # Otherwise, raise the exception.
@@ -1086,33 +1145,16 @@
                 v1_cat.save()
                 self.__catalog = v1_cat
 
-        def refresh(self, full_refresh=False, immediate=False):
-                """Refreshes the publisher's metadata, returning a boolean
-                value indicating whether any updates to the publisher's
-                metadata occurred.
-
-                'full_refresh' is an optional boolean value indicating whether
-                a full retrieval of publisher metadata (e.g. catalogs) or only
-                an update to the existing metadata should be performed.  When
-                True, 'immediate' is also set to True.
-
-                'immediate' is an optional boolean value indicating whether
-                a refresh should occur now.  If False, a publisher's selected
-                repository will be checked for updates only if needs_refresh
-                is True."""
-
-                assert self.catalog_root
-                assert self.transport
+        def __refresh_v0(self, full_refresh, immediate):
+                """The method to refresh the publisher's metadata against
+                a catalog/0 source.  If the more recent catalog/1 version
+                isn't supported, this routine gets invoked as a fallback."""
 
                 if full_refresh:
                         immediate = True
 
-                # Ensure consistent directory structure.
-                self.create_meta_root()
-
-                # XXX Assumes Catalog needs v0 -> v1 transformation as
-                # repositories only offer v0 catalogs currently.
-                import pkg.server.catalog as old_catalog
+                # Catalog needs v0 -> v1 transformation if repository only
+                # offers v0 catalog.
                 v0_cat = old_catalog.ServerCatalog(self.catalog_root,
                     read_only=True, publisher=self.prefix)
 
@@ -1127,6 +1169,9 @@
                                         if e.errno == errno.EACCES:
                                                 raise api_errors.PermissionsException(
                                                     e.filename)
+                                        if e.errno == errno.EROFS:
+                                                raise api_errors.ReadOnlyFileSystemException(
+                                                    e.filename)
                                         raise
                                 immediate = True
                         else:
@@ -1151,6 +1196,9 @@
                                 if e.errno == errno.EACCES:
                                         raise api_errors.PermissionsException(
                                             e.filename)
+                                if e.errno == errno.EROFS:
+                                        raise api_errors.ReadOnlyFileSystemException(
+                                            e.filename)
                                 raise
                         self.transport.get_catalog(self)
 
@@ -1167,6 +1215,193 @@
                         return True
                 return False
 
+        def __refresh_v1(self, tempdir, full_refresh, immediate):
+                """The method to refresh the publisher's metadata against
+                a catalog/1 source.  If the more recent catalog/1 version
+                isn't supported, __refresh_v0 is invoked as a fallback."""
+
+                try:
+                        self.transport.get_catalog1(self, ["catalog.attrs"],
+                            path=tempdir)
+                except api_errors.UnsupportedRepositoryOperation:
+                        # No v1 catalogs available.
+                        return self.__refresh_v0(full_refresh, immediate)
+
+                # If this succeeded, we now have a catalog.attrs file.  Parse
+                # this to determine what other constituent parts need to be
+                # downloaded.
+                flist = []
+                if not full_refresh and self.catalog.exists:
+                        flist = self.catalog.get_updates_needed(tempdir)
+                        if flist == None:
+                                # Catalog has not changed.
+                                self.last_refreshed = dt.datetime.utcnow()
+                                return False
+                else:
+                        attrs = pkg.catalog.CatalogAttrs(meta_root=tempdir)
+                        for name in attrs.parts:
+                                locale = name.split(".", 2)[2]
+                                # XXX Skip parts that aren't in the C locale for
+                                # now.
+                                if locale != "C":
+                                        continue
+                                flist.append(name)
+
+                if flist:
+                        # More catalog files to retrieve.
+                        try:
+                                self.transport.get_catalog1(self, flist,
+                                    path=tempdir)
+                        except api_errors.UnsupportedRepositoryOperation:
+                                # Couldn't find a v1 catalog after getting one
+                                # before.  This would be a bizzare error, but we
+                                # can try for a v0 catalog anyway.
+                                return self.__refresh_v0(full_refresh,
+                                    immediate)
+
+                # At this point the client should have a set of the constituent
+                # pieces that are necessary to construct a catalog.  If a v0
+                # catalog is present, remove it before proceeding.
+                v0_cat = old_catalog.ServerCatalog(self.catalog_root,
+                    read_only=True, publisher=self.prefix)
+
+                if v0_cat.exists:
+                        v0_cat.destroy(root=self.catalog_root)
+
+                # If a catalog already exists, call apply_updates.  Otherwise,
+                # move the files to the appropriate location.
+                revalidate = False
+                if not full_refresh and self.catalog.exists:
+                        self.catalog.apply_updates(tempdir)
+                else:
+
+                        if self.catalog.exists:
+                                # This is a full refresh.  Destroy
+                                # the existing catalog.
+                                self.catalog.destroy()
+                        for fn in os.listdir(tempdir):
+                                srcpath = os.path.join(tempdir, fn)
+                                dstpath = os.path.join(self.catalog_root, fn)
+                                pkg.portable.rename(srcpath, dstpath)
+
+                        # Apply_updates validates the newly constructed catalog.
+                        # If refresh didn't call apply_updates, arrange to
+                        # have the new catalog validated.
+                        revalidate = True
+
+                # Update refresh time.
+                self.last_refreshed = dt.datetime.utcnow()
+
+                # Clear __catalog, so we'll read in the new catalog.
+                self.__catalog = None
+
+                if revalidate:
+                        self.catalog.validate()
+
+                return True
+
+        def __refresh(self, full_refresh, immediate):
+                """The method to handle the overall refresh process.  It
+                determines if a refresh is actually needed, and then calls
+                the first version-specific refresh method in the chain."""
+
+                assert self.catalog_root
+                assert self.transport
+
+                if full_refresh:
+                        immediate = True
+
+                # Ensure consistent directory structure.
+                self.create_meta_root()
+
+                # Check if we already have a v1 catalog on disk.
+                if not full_refresh and self.catalog.exists:
+                        # If catalog is on disk, check if refresh is necessary.
+                        if not immediate and not self.needs_refresh:
+                                # No refresh needed.
+                                return False
+
+                # Create temporary directory for assembly of catalog pieces.
+                try:
+                        tempdir = tempfile.mkdtemp(dir=self.catalog_root)
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise api_errors.PermissionsException(
+                                    e.filename)
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
+                        raise
+
+                # Ensure that the temporary directory gets removed regardless
+                # of success or failure.
+                try:
+                        rval = self.__refresh_v1(tempdir, full_refresh,
+                            immediate)
+
+                        # Perform publisher metadata sanity checks.
+                        self.__validate_metadata()
+
+                        return rval
+                finally:
+                        # Cleanup tempdir.
+                        shutil.rmtree(tempdir, True)
+
+        def refresh(self, full_refresh=False, immediate=False):
+                """Refreshes the publisher's metadata, returning a boolean
+                value indicating whether any updates to the publisher's
+                metadata occurred.
+
+                'full_refresh' is an optional boolean value indicating whether
+                a full retrieval of publisher metadata (e.g. catalogs) or only
+                an update to the existing metadata should be performed.  When
+                True, 'immediate' is also set to True.
+
+                'immediate' is an optional boolean value indicating whether
+                a refresh should occur now.  If False, a publisher's selected
+                repository will be checked for updates only if needs_refresh
+                is True."""
+
+                try:
+                        return self.__refresh(full_refresh, immediate)
+                except (api_errors.BadCatalogUpdateIdentity,
+                    api_errors.DuplicateCatalogEntry,
+                    api_errors.ObsoleteCatalogUpdate,
+                    api_errors.UnknownUpdateType):
+                        if full_refresh:
+                                # Completely unexpected failure.
+                                # These exceptions should never
+                                # be raised for a full refresh
+                                # case anyway, so the error should
+                                # definitely be raised.
+                                raise
+
+                        # The incremental update likely failed for one or
+                        # more of the following reasons:
+                        #
+                        # * The origin for the publisher has changed.
+                        #
+                        # * The catalog that the publisher is offering
+                        #   is now completely different (due to a restore
+                        #   from backup or --rebuild possibly).
+                        #
+                        # * The catalog that the publisher is offering
+                        #   has been restored to an older version, and
+                        #   packages that already exist in this client's
+                        #   copy of the catalog have been re-added.
+                        #
+                        # * The type of incremental update operation
+                        #   that was performed on the catalog isn't supported
+                        #   by this version of the client, so a full retrieval
+                        #   is required.
+                        #
+                        return self.__refresh(True, True)
+                except (api_errors.BadCatalogSignatures,
+                    api_errors.InvalidCatalogFile):
+                        # Assembly of the catalog failed, but this could be due
+                        # to a transient error.  So, retry at least once more.
+                        return self.__refresh(True, True)
+
         def remove_meta_root(self):
                 """Removes the publisher's meta_root."""
 
@@ -1180,6 +1415,9 @@
                         if e.errno == errno.EACCES:
                                 raise api_errors.PermissionsException(
                                     e.filename)
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
                         if e.errno not in (errno.ENOENT, errno.ESRCH):
                                 raise
 
--- a/src/modules/client/transport/engine.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/transport/engine.py	Fri Oct 23 17:43:37 2009 -0500
@@ -106,7 +106,8 @@
                 return ret
 
         def add_url(self, url, filepath=None, writefunc=None, header=None,
-            progtrack=None, sslcert=None, sslkey=None, repourl=None):
+            progtrack=None, sslcert=None, sslkey=None, repourl=None,
+            compressible=False):
                 """Add a URL to the transport engine.  Caller must supply
                 either a filepath where the file should be downloaded,
                 or a callback to a function that will peform the write.
@@ -116,7 +117,8 @@
 
                 t = TransportRequest(url, filepath=filepath,
                     writefunc=writefunc, header=header, progtrack=progtrack,
-                    sslcert=sslcert, sslkey=sslkey, repourl=repourl)
+                    sslcert=sslcert, sslkey=sslkey, repourl=repourl,
+                    compressible=compressible)
 
                 self.__req_q.appendleft(t)
 
@@ -330,7 +332,7 @@
                                 timeout = timeout / 1000.0
 
                         if timeout:
-                               self.__mhandle.select(timeout)
+                                self.__mhandle.select(timeout)
 
                 while self.__freehandles and self.__req_q:
                         t = self.__req_q.pop()
@@ -500,7 +502,11 @@
                                 if e.errno == errno.EACCES:
                                         raise api_errors.PermissionsException(
                                             e.filename)
+                                if e.errno == errno.EROFS:
+                                        raise api_errors.ReadOnlyFileSystemException(
+                                            e.filename)
                                 # Raise OperationError if it's not EACCES
+                                # or EROFS.
                                 raise tx.TransportOperationError(
                                     "Unable to open file: %s" % e)
          
--- a/src/modules/client/transport/exception.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/transport/exception.py	Fri Oct 23 17:43:37 2009 -0500
@@ -305,3 +305,40 @@
                         return r
                 return cmp(self.reason, other.reason)
 
+class PkgProtoError(TransportException):
+        """Raised when the pkg protocol doesn't behave according to
+        specification.  This is different than TransportProtoError, which
+        deals with the L7 protocols that we can use to perform a pkg(5)
+        transport operation.  Although it doesn't exist, this is essentially
+        a L8 error, since our pkg protocol is built on top of application
+        level protocols.  The Framework errors deal with L3-6 errors."""
+
+        def __init__(self, url, operation=None, version=None, reason=None):
+                TransportException.__init__(self)
+                self.url = url
+                self.reason = reason
+                self.operation = operation
+                self.version = version
+        
+        def __str__(self):
+                # Compose a human-readable description of the protocol error,
+                # appending operation, version, and reason when known.
+                s = "Invalid pkg(5) response from %s" % self.url
+                if self.operation:
+                        s += ": Attempting operation %s" % self.operation
+                if self.version:
+                        s += " version %s" % self.version
+                if self.reason:
+                        s += ":\n%s" % self.reason
+                return s
+        def __cmp__(self, other):
+                if not isinstance(other, PkgProtoError):
+                        return -1
+                r = cmp(self.url, other.url)
+                if r != 0:
+                        return r
+                r = cmp(self.operation, other.operation)
+                if r != 0:
+                        return r
+                r = cmp(self.version, other.version)
+                if r != 0:
+                        return r
+                return cmp(self.reason, other.reason) 
--- a/src/modules/client/transport/repo.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/transport/repo.py	Fri Oct 23 17:43:37 2009 -0500
@@ -31,6 +31,8 @@
 
 import pkg.client.transport.exception as tx
 
+from email.Utils import formatdate
+
 class TransportRepo(object):
         """The TransportRepo class handles transport requests.
         It represents a repo, and provides the same interfaces as
@@ -93,11 +95,13 @@
                 self._url = repostats.url
                 self._repouri = repouri
                 self._engine = engine
+                self._verdata = None
 
         def _add_file_url(self, url, filepath=None, progtrack=None,
-            header=None):
+            header=None, compress=False):
                 self._engine.add_url(url, filepath=filepath,
-                    progtrack=progtrack, repourl=self._url, header=header)
+                    progtrack=progtrack, repourl=self._url, header=header,
+                    compressible=compress)
 
         def _fetch_url(self, url, header=None, compress=False):
                 return self._engine.get_url(url, header, repourl=self._url,
@@ -111,6 +115,12 @@
                 return self._engine.send_data(url, data, header,
                     repourl=self._url)
 
+        def add_version_data(self, verdict):
+                """Cache the information about what versions a repository
+                supports."""
+
+                self._verdata = verdict
+
         def do_search(self, data, header=None):
                 """Perform a remote search against origin repos."""
 
@@ -152,6 +162,62 @@
 
                 return self._fetch_url(requesturl, header, compress=True)
 
+        def get_catalog1(self, filelist, destloc, header=None, ts=None):
+                """Get the files that make up the catalog components
+                that are listed in 'filelist'.  Download the files to
+                the directory specified in 'destloc'.  The caller
+                may optionally specify a dictionary with header
+                elements in 'header'.  If a conditional get is
+                to be performed, 'ts' should contain a floating point
+                value of seconds since the epoch."""
+
+                methodstr = "catalog/1/"
+                urllist = []
+
+                if ts:
+                        # Convert date to RFC 1123 compliant string
+                        tsstr = formatdate(timeval=ts, localtime=False,
+                            usegmt=True)
+                        if not header:
+                                header = {"If-Modified-Since": tsstr}
+                        else:
+                                header["If-Modified-Since"] = tsstr
+
+                # create URL for requests
+                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
+
+                for f in filelist:
+                        url = urlparse.urljoin(baseurl, f)
+                        urllist.append(url)
+                        fn = os.path.join(destloc, f)
+                        self._add_file_url(url, filepath=fn, header=header,
+                            compress=True)
+
+                while self._engine.pending:
+                        self._engine.run()
+
+                errors = self._engine.check_status(urllist)
+
+                # Transient errors are part of standard control flow.
+                # The repo's caller will look at these and decide whether
+                # to throw them or not.  Permanent failures are raised
+                # by the transport engine as soon as they occur.
+                #
+                # This adds an attribute that describes the request to the
+                # exception, if we were able to figure it out.
+
+                for e in errors:
+                        # When check_status is supplied with a list,
+                        # all exceptions returned will have a url.
+                        # If we didn't do this, we'd need a getattr check.
+                        eurl = e.url
+                        utup = urlparse.urlsplit(eurl)
+                        req = utup[2]
+                        req = os.path.basename(req)
+                        e.request = req
+
+                return errors
+
         def get_datastream(self, fhash, header=None):
                 """Get a datastream from a repo.  The name of the
                 file is given in fhash."""
@@ -230,10 +296,19 @@
                 Returns a fileobject."""
 
                 requesturl = urlparse.urljoin(self._repouri.uri, "versions/0/")
+                return self._fetch_url(requesturl, header)
 
-                resp = self._fetch_url(requesturl, header)
+        def has_version_data(self):
+                """Returns true if this repo knows its version information."""
+
+                return self._verdata is not None
 
-                return resp
+        def supports_version(self, op, ver):
+                """Returns true if operation named in string 'op'
+                supports integer version in 'ver' argument."""
+
+                return self.has_version_data() and \
+                    (op in self._verdata and ver in self._verdata[op])
 
         def touch_manifest(self, mfst, header=None):
                 """Invoke HTTP HEAD to send manifest intent data."""
@@ -264,11 +339,11 @@
 
         # override the download functions to use ssl cert/key
         def _add_file_url(self, url, filepath=None, progtrack=None,
-            header=None):
+            header=None, compress=False):
                 self._engine.add_url(url, filepath=filepath,
                     progtrack=progtrack, sslcert=self._repouri.ssl_cert,
                     sslkey=self._repouri.ssl_key, repourl=self._url,
-                    header=header)
+                    header=header, compressible=compress)
 
         def _fetch_url(self, url, header=None, compress=False):
                 return self._engine.get_url(url, header=header,
--- a/src/modules/client/transport/transport.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/client/transport/transport.py	Fri Oct 23 17:43:37 2009 -0500
@@ -34,6 +34,7 @@
 
 import pkg.fmri
 
+import pkg.catalog as catalog
 import pkg.client.api_errors as apx
 import pkg.client.imageconfig as imageconfig
 import pkg.client.publisher as publisher
@@ -98,7 +99,7 @@
 
                 self.__lock.acquire()
                 try:
-                       fobj = self._do_search(pub, data)
+                        fobj = self._do_search(pub, data)
                 finally:
                         self.__lock.release()
 
@@ -234,6 +235,194 @@
                  
                 raise failures
 
+        @staticmethod
+        def _verify_catalog(filename, dirname):
+                """A wrapper for catalog.verify() that catches
+                BadCatalogSignatures exceptions and translates them to
+                the appropriate InvalidContentException that the transport
+                uses for content verification."""
+
+                filepath = os.path.join(dirname, filename)
+
+                try:
+                        catalog.verify(filepath)
+                except (apx.BadCatalogSignatures, apx.InvalidCatalogFile), e:
+                        os.remove(filepath)
+                        te = tx.InvalidContentException(filepath,
+                            "CatalogPart failed validation: %s" % e)
+                        te.request = filename
+                        raise te
+                return
+
+        def get_catalog1(self, pub, flist, ts=None, path=None):
+                """Get the catalog1 files from publisher 'pub' that
+                are given as a list in 'flist'.  If the caller supplies
+                an optional timestamp argument, only get the files that
+                have been modified since the timestamp.  At the moment,
+                this interface only supports supplying a timestamp
+                if the length of flist is 1.
+
+                The timestamp, 'ts', should be provided as a floating
+                point value of seconds since the epoch in UTC.  If callers
+                have a datetime object, they should use something like:
+
+                time.mktime(dtobj.timetuple()) -> float
+
+                If the caller has a UTC datetime object, the following
+                should be used instead:
+
+                calendar.timegm(dtobj.utctimetuple()) -> float
+
+                The examples above convert the object to the appropriate format
+                for get_catalog1.
+
+                If the caller wants the completed download to be placed
+                in an alternate directory (pub.catalog_root is standard),
+                set a directory path in 'path'."""
+
+                self.__lock.acquire()
+                try:
+                        self._get_catalog1(pub, flist, ts=ts, path=path)
+                finally:
+                        self.__lock.release()
+
+        def _get_catalog1(self, pub, flist, ts=None, path=None):
+                """This is the implementation of get_catalog1.  The
+                other function is a wrapper for this one."""
+
+                retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
+                failures = []
+                repo_found = False
+                header = self.__build_header(uuid=self.__get_uuid(pub))
+
+                # Ensure that caller only passed one item, if ts was
+                # used.
+                if ts and len(flist) > 1:
+                        raise ValueError("Ts may only be used with a single"
+                            " item flist.")
+
+                # download_dir is temporary download path.  Completed_dir
+                # is the cache where valid content lives.
+                if path:
+                        completed_dir = path
+                else:
+                        completed_dir = pub.catalog_root
+                download_dir = self.__img.incoming_download_dir()
+
+                # If captive portal test hasn't been executed, run it
+                # prior to this operation.
+                self._captive_portal_test()
+
+                # Check if the download_dir exists.  If it doesn't, create
+                # the directories.
+                self._makedirs(download_dir)
+                self._makedirs(completed_dir)
+
+                # Call setup if the transport isn't configured yet.
+                if not self.__engine:
+                        self.__setup()
+
+                # Call statvfs to find the blocksize of download_dir's
+                # filesystem.
+                try:
+                        destvfs = os.statvfs(download_dir)
+                        # Set the file buffer size to the blocksize of our
+                        # filesystem.
+                        self.__engine.set_file_bufsz(destvfs[statvfs.F_BSIZE])
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise apx.PermissionsException(e.filename)
+                        else:
+                                raise tx.TransportOperationError(
+                                    "Unable to stat VFS: %s" % e)
+                except AttributeError, e:
+                        # os.statvfs is not available on Windows
+                        pass
+
+                for d in self.__gen_origins_byversion(pub, retry_count,
+                    "catalog", 1):
+
+                        failedreqs = []
+                        repostats = self.stats[d.get_url()]
+                        repo_found = True
+
+                        # This returns a list of transient errors
+                        # that occurred during the transport operation.
+                        # An exception handler here isn't necessary
+                        # unless we want to suppress a permanent failure.
+                        try:
+                                errlist = d.get_catalog1(flist, download_dir,
+                                    header, ts)
+                        except tx.TransportProtoError, e:
+                                # If we've performed a conditional
+                                # request, and it returned 304, raise a
+                                # CatalogNotModified exception here.
+                                if e.code == httplib.NOT_MODIFIED:
+                                        raise apx.CatalogNotModified(e.url)
+                                else:
+                                        raise
+
+                        for e in errlist:
+                                # General case: Fish the request information
+                                # out of the exception, so the transport
+                                # can retry the request at another host.
+                                req = getattr(e, "request", None)
+                                if req:
+                                        failedreqs.append(req)
+                                        failures.append(e)
+                                else:
+                                        raise e
+
+                        if not failedreqs:
+                                success = flist
+                                flist = None
+                        else:
+                                success = [
+                                    x for x in flist
+                                    if x not in failedreqs
+                                ]
+                                flist = failedreqs
+
+                        for s in success:
+                                dl_path = os.path.join(download_dir, s)
+
+                                try:
+                                        self._verify_catalog(s, download_dir)
+                                except tx.InvalidContentException, e:
+                                        repostats.record_error()
+                                        failedreqs.append(e.request)
+                                        failures.append(e)
+                                        if not flist:
+                                                flist = failedreqs
+                                        continue
+
+                                final_path = os.path.normpath(
+                                    os.path.join(completed_dir, s))
+                                    
+                                finaldir = os.path.dirname(final_path)
+
+                                self._makedirs(finaldir)
+                                portable.rename(dl_path, final_path)
+
+                        # Return if everything was successful
+                        if not flist and not errlist:
+                                return
+
+                if not repo_found:
+                        raise apx.UnsupportedRepositoryOperation(pub,
+                            "catalog/1")
+
+                if failedreqs and failures:
+                        failures = [
+                            x for x in failures
+                            if x.request in failedreqs
+                        ]
+                        tfailurex = tx.TransportFailures()
+                        for f in failures:
+                                tfailurex.append(f)
+                        raise tfailurex
+               
+
         def get_datastream(self, fmri, fhash):
                 """Given a fmri, and fhash, return a data stream for the remote
                 object.
@@ -410,13 +599,14 @@
                         try:
                                 os.makedirs(newdir)
                         except EnvironmentError, e:
-                                if e.errno == errno.EACCES or \
-                                    e.errno == errno.EROFS:
+                                if e.errno == errno.EACCES:
                                         raise apx.PermissionsException(
                                             e.filename)
-                                else:
-                                        raise tx.TransportOperationError(
-                                            "Unable to make directory: %s" % e)
+                                if e.errno == errno.EROFS:
+                                        raise apx.ReadOnlyFileSystemException(
+                                            e.filename)
+                                raise tx.TransportOperationError("Unable to "
+                                    "make directory: %s" % e)
 
         def _get_files(self, mfile):
                 """Perform an operation that gets multiple files at once.
@@ -462,7 +652,8 @@
                 # filesystem.
                 try:
                         destvfs = os.statvfs(download_dir)
-                        # set the file buffer size to the blocksize of our filesystem
+                        # Set the file buffer size to the blocksize of our
+                        # filesystem.
                         self.__engine.set_file_bufsz(destvfs[statvfs.F_BSIZE])
                 except EnvironmentError, e:
                         if e.errno == errno.EACCES:
@@ -536,6 +727,8 @@
                 if len(failedreqs) > 0 and len(failures) > 0:
-                        failures = filter(lambda x: x.request in failedreqs,
-                            failures)
+                        failures = [
+                            x for x in failures
+                            if x.request in failedreqs
+                        ]
                         tfailurex = tx.TransportFailures()
                         for f in failures:
                                 tfailurex.append(f)
@@ -558,7 +754,6 @@
 
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 failures = tx.TransportFailures()
-                verlines = None
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
                 # If captive portal test hasn't been executed, run it
@@ -571,18 +766,13 @@
                         # save it if it's retryable, otherwise
                         # raise the error to a higher-level handler.
                         try:
-                                resp = d.get_versions(header)
-                                verlines = resp.readlines()
-
-                                return dict(
-                                    s.split(None, 1)
-                                    for s in (l.strip() for l in verlines)
-                                )
-         
+                                vers = self.__get_version(d, header)
+                                # Save this information for later use, too.
+                                self.__populate_repo_versions(d, vers)
+                                return vers
                         except tx.TransportException, e:
                                 if e.retryable:
                                         failures.append(e)
-                                        verlines = None
                                 else:
                                         raise
                         except ValueError:
@@ -592,6 +782,55 @@
 
                 raise failures
 
+        @staticmethod
+        def __get_version(repo, header=None):
+                """An internal method that returns a versions dictionary
+                given a transport repo object."""
+
+                resp = repo.get_versions(header)
+                verlines = resp.readlines()
+
+                return dict(
+                    s.split(None, 1)
+                    for s in (l.strip() for l in verlines)
+                )
+
+        def __populate_repo_versions(self, repo, vers=None):
+                """Download versions information for the transport
+                repository object and store that information inside
+                of it."""
+
+                # Call __get_version to get the version dictionary
+                # from the repo.
+
+                if not vers:
+                        try:
+                                vers = self.__get_version(repo)
+                        except ValueError:
+                                raise tx.PkgProtoError(repo.get_url(),
+                                    "versions", 0,
+                                    "ValueError while parsing response")
+
+                for key, val in vers.items():
+                        # Don't turn this line into a list of versions.
+                        if key == "pkg-server":
+                                continue
+
+                        try:
+                                versids = [
+                                    int(v)
+                                    for v in val.split()
+                                ]
+                        except ValueError:
+                                raise tx.PkgProtoError(repo.get_url(),
+                                    "versions", 0,
+                                    "Unable to parse version ids.")
+
+                        # Insert the list back into the dictionary.
+                        vers[key] = versids
+
+                repo.add_version_data(vers)
+
         def __gen_origins(self, pub, count):
                 """The pub argument may either be a Publisher or a
                 RepositoryURI object."""
@@ -612,6 +851,37 @@
                         for rs, ruri in rslist:
                                 yield self.__repo_cache.new_repo(rs, ruri)
 
+        def __gen_origins_byversion(self, pub, count, operation, version):
+                """Return origin repos for publisher pub, that support
+                the operation specified as a string in the 'operation'
+                argument.  The operation must support the version
+                specified as an integer by the 'version' argument."""
+
+                if not self.__engine:
+                        self.__setup()
+
+                if isinstance(pub, publisher.Publisher):
+                        origins = pub.selected_repository.origins
+                else:
+                        # If search was invoked with -s option, we'll have
+                        # a RepoURI instead of a publisher.  Convert
+                        # this to a repo uri
+                        origins = [pub]
+
+                for i in xrange(count):
+                        rslist = self.stats.get_repostats(origins)
+                        for rs, ruri in rslist:
+                                repo = self.__repo_cache.new_repo(rs, ruri)
+                                if not repo.has_version_data():
+                                        try:
+                                                self.__populate_repo_versions(
+                                                    repo)
+                                        except tx.TransportException:
+                                                continue
+
+                                if repo.supports_version(operation, version):
+                                        yield repo
+
         def __gen_repos(self, pub, count):
 
                 if not self.__engine:
--- a/src/modules/depotcontroller.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/depotcontroller.py	Fri Oct 23 17:43:37 2009 -0500
@@ -44,22 +44,24 @@
         RUNNING = 2
 
         def __init__(self):
+                self.__auto_port = True
+                self.__cfg_file = None
+                self.__debug_features = {}
+                self.__depot_handle = None
                 self.__depot_path = "/usr/lib/pkg.depotd"
                 self.__depot_content_root = None
-                self.__auto_port = True
+                self.__dir = None
+                self.__disable_ops = None
+                self.__logpath = "/tmp/depot.log"
+                self.__mirror = False
+                self.__output = None
                 self.__port = -1
-                self.__dir = None
+                self.__props = {}
                 self.__readonly = False
                 self.__rebuild = False
                 self.__refresh_index = False
-                self.__mirror = False
-                self.__logpath = "/tmp/depot.log"
-                self.__output = None
-                self.__depot_handle = None
-                self.__cfg_file = None
+                self.__state = self.HALTED
                 self.__writable_root = None
-                self.__state = self.HALTED
-                self.__debug_features = {}
                 return
 
         def set_depotd_path(self, path):
@@ -81,6 +83,16 @@
         def get_port(self):
                 return self.__port
 
+        def clear_property(self, section, prop):
+                del self.__props[section][prop]
+
+        def set_property(self, section, prop, value):
+                self.__props.setdefault(section, {})
+                self.__props[section][prop] = value
+
+        def get_property(self, section, prop):
+                return self.__props.get(section, {}).get(prop)
+
         def set_repodir(self, repodir):
                 self.__dir = repodir
 
@@ -141,9 +153,15 @@
         def unset_debug_feature(self, feature):
                 del self.__debug_features[feature]
 
+        def set_disable_ops(self, ops):
+                self.__disable_ops = ops
+
+        def unset_disable_ops(self):
+                self.__disable_ops = None
+
         def __network_ping(self):
                 try:
-                        c, v = versioned_urlopen(self.get_depot_url(),
+                        versioned_urlopen(self.get_depot_url(),
                             "versions", [0])
                 except urllib2.HTTPError, e:
                         # Server returns NOT_MODIFIED if catalog is up
@@ -199,33 +217,46 @@
                 if self.__debug_features:
                         args.append("--debug=%s" % ",".join(
                             self.__debug_features))
+                if self.__disable_ops:
+                        args.append("--disable-ops=%s" % ",".join(
+                            self.__disable_ops))
+                for section in self.__props:
+                        for prop, val in self.__props[section].iteritems():
+                                args.append("--set-property=%s.%s=%s" %
+                                    (section, prop, val))
                 if self.__writable_root:
                         args.append("--writable-root=%s" % self.__writable_root)
+
+                # Always log access and error information.
+                args.append("--log-access=stdout")
+                args.append("--log-errors=stderr")
+
                 return args
 
         def __initial_start(self):
                 if self.__state != self.HALTED:
-                        raise DepotStateException("Depot already starting or running")
+                        raise DepotStateException("Depot already starting or "
+                            "running")
 
                 # XXX what about stdin and stdout redirection?
                 args = self.get_args()
 
-		if self.__network_ping():
-			raise DepotStateException("A depot (or some " +
-			    "other network process) seems to be " +
-			    "running on port %d already!" % self.__port)
+                if self.__network_ping():
+                        raise DepotStateException("A depot (or some " +
+                            "other network process) seems to be " +
+                            "running on port %d already!" % self.__port)
 
-		self.__state = self.STARTING
+                self.__state = self.STARTING
 
-		self.__output = open(self.__logpath, "w", 0)
+                self.__output = open(self.__logpath, "w", 0)
 
-		self.__depot_handle = subprocess.Popen(args = args,
-                    stdin = subprocess.PIPE,
-		    stdout = self.__output,
-                    stderr = self.__output,
+                self.__depot_handle = subprocess.Popen(args=args,
+                    stdin=subprocess.PIPE,
+                    stdout=self.__output,
+                    stderr=self.__output,
                     close_fds=True)
-		if self.__depot_handle == None:
-			raise DepotStateException("Could not start Depot")
+                if self.__depot_handle == None:
+                        raise DepotStateException("Could not start Depot")
                 
         def start(self):
                 self.__initial_start()
@@ -233,8 +264,8 @@
                 if self.__refresh_index:
                         return
                 
-		sleeptime = 0.05
-		contact = False
+                sleeptime = 0.05
+                contact = False
                 while sleeptime <= 40.0:
                         rc = self.__depot_handle.poll()
                         if rc is not None:
@@ -242,34 +273,34 @@
                                     "unexpectedly while starting "
                                     "(exit code %d)" % rc)
 
-			if self.is_alive():
-				contact = True
-				break
-			time.sleep(sleeptime)
-			sleeptime *= 2
-		
-		if contact == False:
-			self.kill()
-			self.__state = self.HALTED
-			raise DepotStateException("Depot did not respond to "
+                        if self.is_alive():
+                                contact = True
+                                break
+                        time.sleep(sleeptime)
+                        sleeptime *= 2
+
+                if contact == False:
+                        self.kill()
+                        self.__state = self.HALTED
+                        raise DepotStateException("Depot did not respond to "
                             "repeated attempts to make contact")
 
-		self.__state = self.RUNNING
+                self.__state = self.RUNNING
 
         def start_expected_fail(self):
                 self.__initial_start()
-		
-		sleeptime = 0.05
-		died = False
+
+                sleeptime = 0.05
+                died = False
                 rc = None
-		while sleeptime <= 10.0:
+                while sleeptime <= 10.0:
 
                         rc = self.__depot_handle.poll()
                         if rc is not None:
-				died = True
-				break
-			time.sleep(sleeptime)
-			sleeptime *= 2
+                                died = True
+                                break
+                        time.sleep(sleeptime)
+                        sleeptime *= 2
                 
                 if died and rc == 2:
                         self.__state = self.HALTED
@@ -297,7 +328,7 @@
                 status = -1
                 #
                 # With sleeptime doubling every loop iter, and capped at
-		# 10.0 secs, the cumulative time waited will be 10 secs.
+                # 10.0 secs, the cumulative time waited will be 10 secs.
                 #
                 sleeptime = 0.05
                 firsttime = True
@@ -305,21 +336,21 @@
                 while sleeptime <= 10.0:
                         status = self.__depot_handle.poll()
                         if status is not None:
-				break
+                                break
 
-			#
-			# No status, Depot process seems to be running
-			# XXX could also check liveness with a kill.
-			#
-			if firsttime:
-				# XXX porting issue
-				os.kill(self.__depot_handle.pid, signal.SIGTERM)
-				firsttime = False
+                        #
+                        # No status, Depot process seems to be running
+                        # XXX could also check liveness with a kill.
+                        #
+                        if firsttime:
+                                # XXX porting issue
+                                os.kill(self.__depot_handle.pid, signal.SIGTERM)
+                                firsttime = False
 
-			time.sleep(sleeptime)
-			sleeptime *= 2
-		else:
-			assert status is None
+                        time.sleep(sleeptime)
+                        sleeptime *= 2
+                else:
+                        assert status is None
                         print >> sys.stderr, \
                             "Depot did not shut down, trying kill -9 %d" % \
                             self.__depot_handle.pid
@@ -377,11 +408,11 @@
                         raise
 
 if __name__ == "__main__":
-        testdir = "/tmp/depotcontrollertest.%d" % os.getpid()
+        __testdir = "/tmp/depotcontrollertest.%d" % os.getpid()
         try:
-                test_func(testdir)
+                test_func(__testdir)
         except KeyboardInterrupt:
                 pass
-        os.system("rm -fr %s" % testdir)
+        os.system("rm -fr %s" % __testdir)
         print "\nDone"
 
--- a/src/modules/manifest.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/manifest.py	Fri Oct 23 17:43:37 2009 -0500
@@ -88,8 +88,8 @@
 
         def __str__(self):
                 r = ""
-                if "fmri" not in self.attributes and self.fmri != None:
-                        r += "set name=fmri value=%s\n" % self.fmri
+                if "pkg.fmri" not in self.attributes and self.fmri != None:
+                        r += "set name=pkg.fmri value=%s\n" % self.fmri
 
                 for act in sorted(self.actions):
                         r += "%s\n" % act
@@ -99,8 +99,8 @@
                 """A generator function that returns the unsorted manifest
                 contents as lines of text."""
 
-                if "fmri" not in self.attributes and self.fmri != None:
-                        yield "set name=fmri value=%s\n" % self.fmri
+                if "pkg.fmri" not in self.attributes and self.fmri != None:
+                        yield "set name=pkg.fmri value=%s\n" % self.fmri
 
                 for act in self.actions:
                         yield "%s\n" % act
@@ -357,13 +357,14 @@
                 """Fill attribute array w/ set action contents."""
                 try:
                         keyvalue = action.attrs["name"]
+                        if keyvalue == "fmri":
+                                keyvalue = "pkg.fmri"
                         if keyvalue not in self.attributes:
                                 self.attributes[keyvalue] = \
                                     action.attrs["value"]
                 except KeyError: # ignore broken set actions
                         pass
 
-                
         @staticmethod
         def search_dict(file_path, excludes, return_line=False,
             log=None):
@@ -638,16 +639,6 @@
                 """Finish loading.... this part of initialization is common 
                 to multiple code paths"""
                 self.loaded = True
-                # this needs to change; we should not modify on-disk manifest
-                if self.fmri and "publisher" not in self.attributes:
-                        if not self.fmri.has_publisher():
-                                pub = self.__pub
-                        else:
-                                pub = self.fmri.get_publisher()
-
-                        # This shouldn't be set unless available.
-                        if pub:
-                                Manifest.__setitem__(self, "publisher", pub)
 
         def __storeback(self):
                 """ store the current action set; also create per-type
--- a/src/modules/misc.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/misc.py	Fri Oct 23 17:43:37 2009 -0500
@@ -691,6 +691,8 @@
                             publisher=pub)
                 if e.errno == errno.EACCES:
                         raise api_errors.PermissionsException(e.filename)
+                if e.errno == errno.EROFS:
+                        raise api_errors.ReadOnlyFileSystemException(e.filename)
                 raise
 
         try:
--- a/src/modules/publish/dependencies.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/publish/dependencies.py	Fri Oct 23 17:43:37 2009 -0500
@@ -192,7 +192,7 @@
 
         if mfst is None:
                 return urllib.unquote(os.path.basename(fp))
-        name = mfst.get("fmri", None)
+        name = mfst.get("pkg.fmri", mfst.get("fmri", None))
         if name is not None:
                 return name
         return urllib.unquote(os.path.basename(fp))
--- a/src/modules/publish/transaction.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/publish/transaction.py	Fri Oct 23 17:43:37 2009 -0500
@@ -34,11 +34,9 @@
 import urllib2
 import urlparse
 
-from pkg.misc import versioned_urlopen
+from pkg.misc import versioned_urlopen, EmptyDict
 import pkg.portable.util as os_util
-import pkg.server.catalog as catalog
-import pkg.server.config as config
-import pkg.server.repository as repo
+import pkg.server.repository as sr
 import pkg.server.repositoryconfig as rc
 
 class TransactionError(Exception):
@@ -54,6 +52,11 @@
                 return str(self.data)
 
 
+class TransactionRepositoryConfigError(TransactionError):
+        """Used to indicate that the configuration information for the
+        destination repository is invalid or is missing required values."""
+
+
 class TransactionRepositoryURLError(TransactionError):
         """Used to indicate the specified repository URL is not valid or is not
         supported (e.g. because of the scheme).
@@ -102,6 +105,11 @@
                     self.args.get("msg", "")
 
 
+class TransactionRepositoryInvalidError(TransactionError):
+        """Used to indicate that the specified repository is not valid or can
+        not be found at the requested location."""
+
+
 class UnsupportedRepoTypeOperationError(TransactionError):
         """Used to indicate that a requested operation is not supported for the
         type of repository being operated on (http, file, etc.)."""
@@ -120,7 +128,7 @@
         __repo_cache = {}
 
         def __init__(self, origin_url, create_repo=False, pkg_name=None,
-            trans_id=None):
+            repo_props=EmptyDict, trans_id=None):
                 scheme, netloc, path, params, query, fragment = \
                     urlparse.urlparse(origin_url, "file", allow_fragments=0)
                 path = urllib.url2pathname(path)
@@ -132,37 +140,27 @@
                             msg=_("Not an absolute path."))
 
                 if origin_url not in repo_cache:
-                        scfg = config.SvrConfig(path, None, None,
-                            auto_create=create_repo)
                         try:
-                                scfg.init_dirs()
-                        except (config.SvrConfigError, EnvironmentError), e:
+                                repo = sr.Repository(auto_create=create_repo,
+                                    properties=repo_props, repo_root=path)
+                        except EnvironmentError, e:
                                 raise TransactionOperationError(None, msg=_(
                                     "An error occurred while trying to "
                                     "initialize the repository directory "
                                     "structures:\n%s") % e)
-
-                        scfg.acquire_in_flight()
-
-                        try:
-                                scfg.acquire_catalog()
-                        except catalog.CatalogPermissionsException, e:
+                        except rc.PropertyError, e:
+                                raise TransactionRepositoryConfigError(str(e))
+                        except sr.RepositoryInvalidError, e:
+                                raise TransactionRepositoryInvalidError(str(e))
+                        except sr.RepositoryError, e:
                                 raise TransactionOperationError(None,
-                                    origin_url, msg=str(e))
-
-                        try:
-                                repo_cache[origin_url] = repo.Repository(scfg)
-                        except rc.InvalidAttributeValueError, e:
-                                raise TransactionOperationError(None,
-                                    msg=_("The specified repository's "
-                                    "configuration data is not "
-                                    "valid:\n%s") % e)
+                                    msg=str(e))
+                        repo_cache[origin_url] = repo
 
                 self.__repo = repo_cache[origin_url]
                 self.origin_url = origin_url
                 self.pkg_name = pkg_name
                 self.trans_id = trans_id
-                return
 
         def add(self, action):
                 """Adds an action and its related content to an in-flight
@@ -170,7 +168,7 @@
 
                 try:
                         self.__repo.add(self.trans_id, action)
-                except repo.RepositoryError, e:
+                except sr.RepositoryError, e:
                         raise TransactionOperationError("add",
                             trans_id=self.trans_id, msg=str(e))
 
@@ -191,14 +189,14 @@
                         try:
                                 pkg_fmri = None
                                 pkg_state = self.__repo.abandon(self.trans_id)
-                        except repo.RepositoryError, e:
+                        except sr.RepositoryError, e:
                                 raise TransactionOperationError("abandon",
                                     trans_id=self.trans_id, msg=str(e))
                 else:
                         try:
                                 pkg_fmri, pkg_state = self.__repo.close(
                                     self.trans_id, refresh_index=refresh_index)
-                        except repo.RepositoryError, e:
+                        except sr.RepositoryError, e:
                                 raise TransactionOperationError("close",
                                     trans_id=self.trans_id, msg=str(e))
                 return pkg_fmri, pkg_state
@@ -210,7 +208,7 @@
                 try:
                         self.trans_id = self.__repo.open(
                             os_util.get_os_release(), self.pkg_name)
-                except repo.RepositoryError, e:
+                except sr.RepositoryError, e:
                         raise TransactionOperationError("open",
                             trans_id=self.trans_id, msg=str(e))
                 return self.trans_id
@@ -221,7 +219,7 @@
 
                 try:
                         self.__repo.refresh_index()
-                except repo.RepositoryError, e:
+                except sr.RepositoryError, e:
                         raise TransactionOperationError("refresh_index",
                             msg=str(e))
 
@@ -230,7 +228,7 @@
         """Provides a publishing interface for HTTP(S)-based repositories."""
 
         def __init__(self, origin_url, create_repo=False, pkg_name=None,
-            trans_id=None):
+            repo_props=EmptyDict, trans_id=None):
 
                 if create_repo:
                         scheme, netloc, path, params, query, fragment = \
@@ -242,7 +240,6 @@
                 self.origin_url = origin_url
                 self.pkg_name = pkg_name
                 self.trans_id = trans_id
-                return
 
         @staticmethod
         def __get_urllib_error(e):
@@ -448,12 +445,11 @@
         purposes."""
 
         def __init__(self, origin_url, create_repo=False, pkg_name=None,
-            trans_id=None):
+            repo_props=EmptyDict, trans_id=None):
                 self.create_repo = create_repo
                 self.origin_url = origin_url
                 self.pkg_name = pkg_name
                 self.trans_id = trans_id
-                return
 
         @staticmethod
         def add(action):
@@ -520,7 +516,7 @@
         }
 
         def __new__(cls, origin_url, create_repo=False, pkg_name=None,
-            trans_id=None, noexecute=False):
+            repo_props=EmptyDict, trans_id=None, noexecute=False):
                 scheme, netloc, path, params, query, fragment = \
                     urlparse.urlparse(origin_url, "http", allow_fragments=0)
                 scheme = scheme.lower()
@@ -540,4 +536,4 @@
 
                 return cls.__schemes[scheme](origin_url,
                     create_repo=create_repo, pkg_name=pkg_name,
-                    trans_id=trans_id)
+                    repo_props=repo_props, trans_id=trans_id)
--- a/src/modules/server/api.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/server/api.py	Fri Oct 23 17:43:37 2009 -0500
@@ -26,14 +26,14 @@
 
 import cherrypy
 import itertools
-import pkg.server.catalog
+import pkg.catalog
 import pkg.fmri
-import pkg.version
 import pkg.server.api_errors as api_errors
+import pkg.server.repository as srepo
 import pkg.server.query_parser as qp
 import pkg.version as version
 
-CURRENT_API_VERSION = 5
+CURRENT_API_VERSION = 6
 
 class BaseInterface(object):
         """This class represents a base API object that is provided by the
@@ -43,61 +43,63 @@
         needed by interfaces to provide functionality to clients.
         """
 
-        # A semi-private reference to a cherrypy request object.
-        _request = None
-        # A semi-private reference to a SvrConfig object.
-        _svrconfig = None
-        # A semi-private reference to a RepositoryConfig object.
-        _rcconfig = None
+        def __init__(self, request, repo, content_root=None, web_root=None):
+                # A protected reference to a pkg.server.repository object.
+                self._repo = repo
 
-        def __init__(self, request, svrconfig, rcconfig):
+                # A protected reference to a cherrypy request object.
                 self._request = request
-                self._svrconfig = svrconfig
-                self._rcconfig = rcconfig
+
+                # BUI-specific paths.
+                self._content_root = content_root
+                self._web_root = web_root
 
 class _Interface(object):
         """Private base class used for api interface objects.
         """
         def __init__(self, version_id, base):
-                compatible_versions = set([3, 4, 5])
+                compatible_versions = set([6])
                 if version_id not in compatible_versions:
                         raise api_errors.VersionException(CURRENT_API_VERSION,
                             version_id)
 
+                self._repo = base._repo
+                self._request = base._request
+                self._content_root = base._content_root
+                self._web_root = base._web_root
+
 class CatalogInterface(_Interface):
         """This class presents an interface to server catalog objects that
         clients may use.
         """
 
-        def __init__(self, version_id, base):
-                _Interface.__init__(self, version_id, base)
-                catalog = None
-                if not base._svrconfig.is_mirror():
-                        catalog = base._svrconfig.catalog
-                self.__catalog = catalog
-
         def fmris(self):
-                """Returns a list of FMRIs as it iterates over the contents of
-                the server's catalog.  Returns an empty list if the catalog is
-                not available.
-                """
-                if not self.__catalog:
-                        return []
-                return self.__catalog.fmris()
+                """A generator function that produces FMRIs as it iterates
+                over the contents of the server's catalog."""
+                try:
+                        c = self._repo.catalog
+                except srepo.RepositoryMirrorError:
+                        return iter(())
+                return self._repo.catalog.fmris()
 
         def get_matching_pattern_fmris(self, patterns):
-                """Returns a sorted list of PkgFmri objects, newest versions
-                first, for packages matching those found in the 'patterns' list.
+                """Returns a tuple of a sorted list of PkgFmri objects, newest
+                versions first, for packages matching those found in the
+                'patterns' list, and a dict of unmatched patterns indexed by
+                match criteria.
                 """
-                c = self.__catalog
-                if not c:
-                        return []
-                return pkg.server.catalog.extract_matching_fmris(c.fmris(),
+                try:
+                        c = self._repo.catalog
+                except srepo.RepositoryMirrorError:
+                        return tuple(), {}
+                return pkg.catalog.extract_matching_fmris(c.fmris(),
                     patterns=patterns)
 
         def get_matching_version_fmris(self, versions):
-                """Returns a sorted list of PkgFmri objects, newest versions
-                first, for packages matching those found in the 'versions' list.
+                """Returns a tuple of a sorted list of PkgFmri objects, newest
+                versions first, for packages matching those found in the
+                'versions' list, and a dict of unmatched versions indexed by
+                match criteria.
 
                 'versions' should be a list of strings of the format:
                     release,build_release-branch:datetime
@@ -107,11 +109,11 @@
                 as wildcard characters ('*' for one or more characters, '?' for
                 a single character).
                 """
-                c = self.__catalog
-                if not c:
-                        return []
-
-                return pkg.server.catalog.extract_matching_fmris(c.fmris(),
+                try:
+                        c = self._repo.catalog
+                except srepo.RepositoryMirrorError:
+                        return tuple(), {}
+                return pkg.catalog.extract_matching_fmris(c.fmris(),
                     versions=versions)
 
         @property
@@ -120,21 +122,33 @@
                 which the catalog was last modified.  Returns None if not
                 available.
                 """
-                if not self.__catalog:
+                try:
+                        c = self._repo.catalog
+                except srepo.RepositoryMirrorError:
                         return None
-                lm = self.__catalog.last_modified()
-                if not lm:
-                        return None
-                return pkg.server.catalog.ts_to_datetime(lm)
+                return c.last_modified
 
         @property
         def package_count(self):
                 """The total number of packages in the catalog.  Returns None
                 if the catalog is not available.
                 """
-                if not self.__catalog:
+                try:
+                        c = self._repo.catalog
+                except srepo.RepositoryMirrorError:
                         return None
-                return self.__catalog.npkgs()
+                return c.package_count
+
+        @property
+        def package_version_count(self):
+                """The total number of package versions in the catalog.  Returns
+                None if the catalog is not available.
+                """
+                try:
+                        c = self._repo.catalog
+                except srepo.RepositoryMirrorError:
+                        return None
+                return c.package_version_count
 
         def search(self, tokens, case_sensitive=False,
             return_type=qp.Query.RETURN_PACKAGES, start_point=None,
@@ -249,22 +263,25 @@
                         # can be immediately raised.
                         query = qp.Query(" ".join(tokens), case_sensitive,
                             return_type, None, None)
-                        results = self.__catalog.search(query)
+                        res_list = self._repo.search([str(query)])
+                        if not res_list:
+                                return
 
-                        return filtered_search(results, mver)
+                        return filtered_search(res_list[0], mver)
 
                 query = qp.Query(" ".join(tokens), case_sensitive,
                     return_type, num_to_return, start_point)
-                return self.__catalog.search(query)
+                res_list = self._repo.search([str(query)])
+                if not res_list:
+                        return
+                return res_list[0]
 
         @property
         def search_available(self):
                 """Returns a Boolean value indicating whether search
                 functionality is available for the catalog.
                 """
-                if not self.__catalog:
-                        return False
-                return self.__catalog.search_available()
+                return self._repo.search_available
 
 
 class ConfigInterface(_Interface):
@@ -272,96 +289,90 @@
         information and statistics about the depot that clients may use.
         """
 
-        def __init__(self, version_id, base):
-                _Interface.__init__(self, version_id, base)
-                self.__svrconfig = base._svrconfig
-                self.__rcconfig = base._rcconfig
-
         @property
         def catalog_requests(self):
                 """The number of /catalog operation requests that have occurred
                 during the current server session.
                 """
-                return self.__svrconfig.catalog_requests
+                return self._repo.catalog_requests
 
         @property
         def content_root(self):
                 """The file system path where the server's content and web
                 directories are located.
                 """
-                return self.__svrconfig.content_root
+                return self._content_root
 
         @property
         def file_requests(self):
                 """The number of /file operation requests that have occurred
                 during the current server session.
                 """
-                return self.__svrconfig.file_requests
+                return self._repo.file_requests
 
         @property
         def filelist_requests(self):
                 """The number of /filelist operation requests that have occurred
                 during the current server session.
                 """
-                return self.__svrconfig.flist_requests
+                return self._repo.flist_requests
 
         @property
         def filelist_file_requests(self):
                 """The number of files served by /filelist operations requested
                 during the current server session.
                 """
-                return self.__svrconfig.flist_files
+                return self._repo.flist_files
 
         @property
         def in_flight_transactions(self):
                 """The number of package transactions awaiting completion.
                 """
-                return len(self.__svrconfig.in_flight_trans)
+                return self._repo.in_flight_transactions
 
         @property
         def manifest_requests(self):
                 """The number of /manifest operation requests that have occurred
                 during the current server session.
                 """
-                return self.__svrconfig.manifest_requests
+                return self._repo.manifest_requests
 
         @property
         def mirror(self):
                 """A Boolean value indicating whether the server is currently
                 operating in mirror mode.
                 """
-                return self.__svrconfig.mirror
+                return self._repo.mirror
 
         @property
         def readonly(self):
                 """A Boolean value indicating whether the server is currently
                 operating in readonly mode.
                 """
-                return self.__svrconfig.read_only
+                return self._repo.read_only
 
         @property
         def rename_requests(self):
                 """The number of /rename operation requests that have occurred
                 during the current server session.
                 """
-                return self.__svrconfig.pkgs_renamed
+                return self._repo.pkgs_renamed
 
         @property
         def web_root(self):
                 """The file system path where the server's web content is
                 located.
                 """
-                return self.__svrconfig.web_root
-
+                return self._web_root
 
-        def get_repo_attrs(self):
+        def get_repo_properties(self):
                 """Returns a dictionary of repository configuration
-                attributes organized by section, with each section's keys
+                properties organized by section, with each section's keys
                 as a list.
 
-                Available attributes are as follows:
+                Available properties are as follows:
 
-                Section     Attribute           Description
+                Section     Property            Description
                 ==========  ==========          ===============
                 publisher   alias               An alternative name for the
                                                 publisher of the packages in
@@ -436,11 +447,6 @@
                             description         A descriptive paragraph for the
                                                 feed.
 
-                            publisher           A fully-qualified domain name or
-                                                email address that is used to
-                                                generate a unique identifier for
-                                                each entry in the feed.
-
                             icon                A filename of a small image that
                                                 is used to visually represent
                                                 the feed.
@@ -457,30 +463,27 @@
                                                 creating the feed for the
                                                 repository updatelog.
                 """
-                return self.__rcconfig.get_attributes()
+                return self._repo.cfg.get_properties()
 
-        def get_repo_attr_value(self, section, attr):
+        def get_repo_property_value(self, section, prop):
                 """Returns the current value of a repository configuration
-                attribute for the specified section.
+                property for the specified section.
                 """
-                return self.__rcconfig.get_attribute(section, attr)
+                return self._repo.cfg.get_property(section, prop)
+
 
 class RequestInterface(_Interface):
         """This class presents an interface to server request objects that
         clients may use.
         """
 
-        def __init__(self, version_id, base):
-                _Interface.__init__(self, version_id, base)
-                self.__request = base._request
-
         def get_accepted_languages(self):
                 """Returns a list of the languages accepted by the client
                 sorted by priority.  This information is derived from the
                 Accept-Language header provided by the client.
                 """
                 alist = []
-                for entry in self.__request.headers.elements("Accept-Language"):
+                for entry in self._request.headers.elements("Accept-Language"):
                         alist.append(str(entry).split(";")[0])
 
                 return alist
@@ -488,7 +491,7 @@
         def get_rel_path(self, uri):
                 """Returns uri relative to the current request path.
                 """
-                return pkg.misc.get_rel_path(self.__request, uri)
+                return pkg.misc.get_rel_path(self._request, uri)
 
         def log(self, msg):
                 """Instruct the server to log the provided message to its error
@@ -501,14 +504,14 @@
                 """A dict containing the parameters sent in the request, either
                 in the query string or in the request body.
                 """
-                return self.__request.params
+                return self._request.params
 
         @property
         def path_info(self):
                 """A string containing the "path_info" portion of the requested
                 URL.
                 """
-                return self.__request.path_info
+                return self._request.path_info
 
         @property
         def query_string(self):
--- a/src/modules/server/catalog.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/server/catalog.py	Fri Oct 23 17:43:37 2009 -0500
@@ -51,7 +51,6 @@
 import urllib
 
 from pkg.misc import EmptyI
-from pkg.server.errors import SvrConfigError
 
 class CatalogException(Exception):
         def __init__(self, args=None):
@@ -231,12 +230,12 @@
                         except EnvironmentError, e:
                                 if e.errno == errno.EACCES:
                                         if has_writable_root:
-                                                raise SvrConfigError(
+                                                raise RuntimeError(
                                                     _("writable root not "
                                                     "writable by current user "
                                                     "id or group."))
                                         else:
-                                                raise SvrConfigError(
+                                                raise RuntimeError(
                                                     _("unable to write to "
                                                     "index directory."))
                                 raise
@@ -811,7 +810,7 @@
                         # file.
                         if tmpfile:
                                 portable.remove(tmpfile)
-                        if e.errno == errno.EACCES:
+                        if e.errno in (errno.EACCES, errno.EROFS):
                                 return
                         else:
                                 raise
--- a/src/modules/server/config.py	Thu Oct 22 19:13:06 2009 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,321 +0,0 @@
-#!/usr/bin/python2.4
-#
-# CDDL HEADER START
-#
-# The contents of this file are subject to the terms of the
-# Common Development and Distribution License (the "License").
-# You may not use this file except in compliance with the License.
-#
-# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
-# or http://www.opensolaris.org/os/licensing.
-# See the License for the specific language governing permissions
-# and limitations under the License.
-#
-# When distributing Covered Code, include this CDDL HEADER in each
-# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
-# If applicable, add the following below this CDDL HEADER, with the
-# fields enclosed by brackets "[]" replaced with your own identifying
-# information: Portions Copyright [yyyy] [name of copyright owner]
-#
-# CDDL HEADER END
-#
-
-#
-# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
-#
-
-import errno
-import os
-import os.path
-import random
-import shutil
-
-import pkg.server.catalog as catalog
-import pkg.server.transaction as trans
-import pkg.updatelog as updatelog
-
-from pkg.server.errors import SvrConfigError
-
-# depot Server Configuration
-class SvrConfig(object):
-        """Server configuration and state object.  The publisher is the default
-        publisher under which packages will be stored.  Repository locations are
-        the primary derived configuration.  State is the current set of
-        transactions and packages stored by the repository.
-
-        If 'auto_create' is True, a new repository will be created at the
-        location specified by 'repo_root' if one does not already exist."""
-
-        def __init__(self, repo_root, content_root, publisher,
-            auto_create=False, fork_allowed=False, writable_root=None):
-                self.set_repo_root(repo_root)
-                self.set_content_root(content_root)
-                self.has_writable_root = False
-                if writable_root:
-                        self.set_writable_root(writable_root)
-
-                self.required_dirs = [self.trans_root, self.file_root,
-                    self.pkg_root, self.cat_root, self.update_root]
-                self.optional_dirs = [self.index_root]
-
-                self.auto_create = auto_create
-                self.publisher = publisher
-                self.fork_allowed = fork_allowed
-                self.read_only = False
-                self.mirror = False
-
-                self.in_flight_trans = {}
-
-                # XXX naive:  change to
-                # catalog_requests = [ (IP-addr, time), ... ]
-                # manifest_requests = { fmri : (IP-addr, time), ... }
-                # file requests = [ (IP-addr, time), ... ]
-                self.catalog_requests = 0
-                self.manifest_requests = 0
-                self.file_requests = 0
-                self.flist_requests = 0
-                self.flist_files = 0
-                self.pkgs_renamed = 0
-
-        def init_dirs(self):
-                emsg = _("repository directories incomplete")
-                if self.auto_create:
-                        for d in self.required_dirs + self.optional_dirs:
-                                try:
-                                        os.makedirs(d)
-                                except EnvironmentError, e:
-                                        if e.errno == errno.EACCES:
-                                                emsg = _("repository "
-                                                    "directories not writeable "
-                                                    "by current user id or "
-                                                    "group and are incomplete")
-                                        elif e.errno != errno.EEXIST:
-                                                raise
-
-                for d in self.required_dirs:
-                        if not os.path.exists(d):
-                                if self.auto_create:
-                                        raise SvrConfigError(emsg)
-                                raise SvrConfigError(_("The specified "
-                                    "repository root '%s' is not a valid "
-                                    "repository.") % self.repo_root)
-
-                if self.content_root and not os.path.exists(self.content_root):
-                        raise SvrConfigError(_("The specified content root "
-                            "'%s' does not exist.") % self.content_root)
-
-                return
-
-        def set_repo_root(self, root):
-                self.repo_root = os.path.abspath(root)
-                self.trans_root = os.path.join(self.repo_root, "trans")
-                self.file_root = os.path.join(self.repo_root, "file")
-                self.pkg_root = os.path.join(self.repo_root, "pkg")
-                self.cat_root = os.path.join(self.repo_root, "catalog")
-                self.update_root = os.path.join(self.repo_root, "updatelog")
-                self.index_root = os.path.join(self.repo_root, "index")
-                self.feed_cache_root = self.repo_root
-
-        def set_content_root(self, root):
-                if root:
-                        self.content_root = os.path.abspath(root)
-                        self.web_root = os.path.join(self.content_root, "web")
-                else:
-                        self.content_root = None
-                        self.web_root = None
-
-        def set_writable_root(self, root):
-                root = os.path.abspath(root)
-                self.index_root = os.path.join(root, "index")
-                self.feed_cache_root = root
-                self.has_writable_root = True
-
-        def set_read_only(self):
-                self.read_only = True
-
-        def set_mirror(self):
-                self.mirror = True
-
-        def is_read_only(self):
-                return self.read_only
-
-        def feed_cache_read_only(self):
-                return self.read_only and not self.has_writable_root
-
-        def is_mirror(self):
-                return self.mirror
-
-        def acquire_in_flight(self):
-                """Walk trans_root, acquiring valid transaction IDs."""
-                tree = os.walk(self.trans_root)
-
-                for txn in tree:
-                        if txn[0] == self.trans_root:
-                                continue
-
-                        t = trans.Transaction()
-                        t.reopen(self, txn[0])
-
-                        self.in_flight_trans[t.get_basename()] = t
-
-        def acquire_catalog(self, rebuild=False, verbose=False):
-                """Tell the catalog to set itself up.  Associate an
-                instance of the catalog with this depot."""
-
-                if self.is_mirror():
-                        return
-
-                if rebuild:
-                        self.destroy_catalog()
-
-                self.catalog = catalog.ServerCatalog(self.cat_root,
-                    pkg_root=self.pkg_root, read_only=self.read_only,
-                    index_root=self.index_root, repo_root=self.repo_root,
-                    rebuild=rebuild, verbose=verbose,
-                    fork_allowed=self.fork_allowed,
-                    has_writable_root=self.has_writable_root)
-
-                # UpdateLog allows server to issue incremental catalog updates
-                self.updatelog = updatelog.UpdateLog(self.update_root,
-                    self.catalog)
-
-        def destroy_catalog(self):
-                """Destroy the catalog.  This is generally done before we
-                re-create a new catalog."""
-
-                if os.path.exists(self.cat_root):
-                        shutil.rmtree(self.cat_root)
-
-                if os.path.exists(self.update_root):
-                        shutil.rmtree(self.update_root)
-
-        def get_status(self):
-                """Display simple server status."""
-
-                if self.mirror:
-                        ret = """\
-Number of files served: %d
-Number of flists requested: %d
-Number of files served by flist: %d
-""" % (self.file_requests, self.flist_requests, self.flist_files)
-                else:
-                        ret = """\
-Number of packages: %d
-Number of in-flight transactions: %d
-
-Number of catalogs served: %d
-Number of manifests served: %d
-Number of files served: %d
-Number of flists requested: %d
-Number of files served by flist: %d
-Number of packages renamed: %d
-""" % (self.catalog.npkgs(), len(self.in_flight_trans),
-    self.catalog_requests, self.manifest_requests,
-    self.file_requests, self.flist_requests, self.flist_files,
-    self.pkgs_renamed)
-
-                return ret
-
-        def inc_catalog(self):
-                self.catalog_requests += 1
-
-        def inc_manifest(self):
-                self.manifest_requests += 1
-
-        def inc_file(self):
-                self.file_requests += 1
-
-        def inc_flist(self):
-                self.flist_requests += 1
-
-        def inc_flist_files(self):
-                self.flist_files += 1
-
-        def inc_renamed(self):
-                self.pkgs_renamed += 1
-
-        def search_available(self):
-                return self.catalog.search_available()
-
-class NastySvrConfig(SvrConfig):
-        """A subclass of SvrConfig that helps implement options
-        for the Nasty server, which misbehaves in order to test
-        the client's failure resistance."""
-
-        def __init__(self, repo_root, content_root, publisher,
-            auto_create=False, fork_allowed=False, writable_root=None):
-
-                # Call parent's constructor
-                SvrConfig.__init__(self, repo_root, content_root, publisher,
-                    auto_create, fork_allowed, writable_root)
-
-                self.nasty = 0
- 
-        def acquire_catalog(self, rebuild=False, verbose=False):
-                """Tell the catalog to set itself up.  Associate an
-                instance of the catalog with this depot."""
-
-                if self.is_mirror():
-                        return
-
-                if rebuild:
-                        self.destroy_catalog()
-
-                self.catalog = catalog.NastyServerCatalog(self.cat_root,
-                    pkg_root=self.pkg_root, read_only=self.read_only,
-                    index_root=self.index_root, repo_root=self.repo_root,
-                    rebuild=rebuild, verbose=verbose,
-                    fork_allowed=self.fork_allowed,
-                    has_writable_root=self.has_writable_root)
-
-                # UpdateLog allows server to issue incremental catalog updates
-                self.updatelog = updatelog.NastyUpdateLog(self.update_root,
-                    self.catalog)
-
-        def set_nasty(self, level):
-                """Set the nasty level using an integer."""
-
-                self.nasty = level
-
-        def is_nasty(self):
-                """Returns true if nasty has been enabled."""
-
-                if self.nasty > 0:
-                        return True
-                return False
-
-        def need_nasty(self):
-                """Randomly returns true when the server should misbehave."""
-
-                if random.randint(1, 100) <= self.nasty:
-                        return True
-                return False
-
-        def need_nasty_bonus(self, bonus=0):
-                """Used to temporarily apply extra nastiness to an operation."""
-
-                if self.nasty + bonus > 95:
-                        nasty = 95
-                else:
-                        nasty = self.nasty + bonus
-
-                if random.randint(1, 100) <= nasty:
-                        return True
-                return False
-
-        def need_nasty_occasionally(self):
-                if random.randint(1, 500) <= self.nasty:
-                        return True
-                return False
-
-        def need_nasty_infrequently(self):
-                if random.randint(1, 2000) <= self.nasty:
-                        return True
-                return False
-
-        def need_nasty_rarely(self):
-                if random.randint(1, 20000) <= self.nasty:
-                        return True
-                return False
-
--- a/src/modules/server/depot.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/server/depot.py	Fri Oct 23 17:43:37 2009 -0500
@@ -51,13 +51,12 @@
 
 import pkg
 import pkg.actions as actions
+import pkg.catalog as catalog
 import pkg.client.publisher as publisher
 import pkg.fmri as fmri
 import pkg.manifest as manifest
 import pkg.misc as misc
 import pkg.p5i as p5i
-import pkg.server.catalog as catalog
-import pkg.server.errors as errors
 import pkg.server.face as face
 import pkg.server.repository as repo
 import pkg.version as version
@@ -109,24 +108,31 @@
             "publisher"
         ]
 
-        def __init__(self, scfg, cfgpathname=None):
+        content_root = None
+        web_root = None
+
+        def __init__(self, repo, content_root, disable_ops=misc.EmptyI):
                 """Initialize and map the valid operations for the depot.  While
                 doing so, ensure that the operations have been explicitly
                 "exposed" for external usage."""
 
-                self.__repo = repo.Repository(scfg, cfgpathname)
-                self.rcfg = self.__repo.rcfg
-                self.scfg = self.__repo.scfg
+                self._repo = repo
+                if content_root:
+                        self.content_root = os.path.abspath(content_root)
+                        self.web_root = os.path.join(self.content_root, "web")
+                else:
+                        self.content_root = None
+                        self.web_root = None
 
                 # Handles the BUI (Browser User Interface).
-                face.init(scfg, self.rcfg)
+                face.init(repo, self.web_root)
 
                 # Store any possible configuration changes.
-                self.__repo.write_config()
+                repo.write_config()
 
-                if scfg.is_mirror():
+                if repo.mirror:
                         self.ops_list = self.REPO_OPS_MIRROR
-                elif scfg.is_read_only():
+                elif repo.read_only:
                         self.ops_list = self.REPO_OPS_READONLY
                 else:
                         self.ops_list = self.REPO_OPS_DEFAULT
@@ -143,6 +149,9 @@
 
                         if op not in self.ops_list:
                                 continue
+                        if op in disable_ops and (ver in disable_ops[op] or
+                            "*" in disable_ops[op]):
+                                continue
 
                         func.__dict__["exposed"] = True
 
@@ -167,20 +176,8 @@
                         cherrypy.engine.subscribe("graceful", self.refresh)
 
         def refresh(self):
-                """Catch SIGUSR1 and restart the depot (picking up any
-                changes to the cfg_cache that may have been made.
-                """
-
-                self.scfg.acquire_in_flight()
-                try:
-                        self.scfg.acquire_catalog(verbose=True)
-                except (catalog.CatalogPermissionsException, errors.SvrConfigError), e:
-                        self.bus.log("pkg.depotd: %s" % e)
-                        self.bus.exit()
-
-                self.__repo.load_config(self.__repo.cfgpathname)
-                self.rcfg = self.__repo.rcfg
-                face.init(self.scfg, self.rcfg)
+                """Catch SIGUSR1 and reload the depot information."""
+                self._repo.reload()
 
         @cherrypy.expose
         def default(self, *tokens, **params):
@@ -198,8 +195,8 @@
                 elif op not in self.vops:
                         request = cherrypy.request
                         response = cherrypy.response
-                        return face.respond(self.scfg, self.rcfg,
-                            request, response, *tokens, **params)
+                        return face.respond(self._repo, self.content_root,
+                            self.web_root, request, response)
 
                 # If we get here, we know that 'operation' is supported.
                 # Ensure that we have a integer protocol version.
@@ -246,7 +243,7 @@
                     start_point=None))]
 
                 try:
-                        res_list = self.__repo.search(query_args_lst)
+                        res_list = self._repo.search(query_args_lst)
                 except repo.RepositorySearchUnavailableError, e:
                         raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
                             str(e))
@@ -302,12 +299,12 @@
 
                 response = cherrypy.response
 
-                if not self.scfg.search_available():
+                if not self._repo.search_available:
                         raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
                             "Search temporarily unavailable")
 
                 try:
-                        res_list = self.__repo.search(query_str_lst)
+                        res_list = self._repo.search(query_str_lst)
                 except (ParseError, BooleanQueryException), e:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
                 except repo.RepositorySearchUnavailableError, e:
@@ -358,45 +355,24 @@
 
         search_1._cp_config = { "response.stream": True }
 
-
         def catalog_0(self, *tokens):
-                """Provide an incremental update or full version of the
-                catalog, as appropriate, to the requesting client."""
+                """Provide a full version of the catalog, as appropriate, to
+                the requesting client.  Incremental catalogs are not supported
+                for v0 catalog clients."""
 
                 request = cherrypy.request
 
+                # Response headers have to be set up *outside* of the function
+                # that yields the catalog content.
+                c = self._repo.catalog
                 response = cherrypy.response
                 response.headers["Content-type"] = "text/plain"
-                response.headers["Last-Modified"] = \
-                    self.scfg.catalog.last_modified()
-
-                lm = request.headers.get("If-Modified-Since", None)
-                if lm is not None:
-                        try:
-                                lm = catalog.ts_to_datetime(lm)
-                        except ValueError:
-                                lm = None
-                        else:
-                                if not self.scfg.updatelog.enough_history(lm):
-                                        # Ignore incremental requests if there
-                                        # isn't enough history to provide one.
-                                        lm = None
-                                elif self.scfg.updatelog.up_to_date(lm):
-                                        response.status = httplib.NOT_MODIFIED
-                                        return
-
-                if lm:
-                        # If a last modified date and time was provided, then an
-                        # incremental update is being requested.
-                        response.headers["X-Catalog-Type"] = "incremental"
-                else:
-                        response.headers["X-Catalog-Type"] = "full"
-                        response.headers["Content-Length"] = str(
-                            self.scfg.catalog.size())
+                response.headers["Last-Modified"] = c.last_modified.isoformat()
+                response.headers["X-Catalog-Type"] = "full"
 
                 def output():
                         try:
-                                for l in self.__repo.catalog(lm):
+                                for l in self._repo.catalog_0():
                                         yield l
                         except repo.RepositoryError, e:
                                 # Can't do anything in a streaming generator
@@ -408,20 +384,58 @@
 
         catalog_0._cp_config = { "response.stream": True }
 
+        def catalog_1(self, *tokens):
+                """Outputs the contents of the specified catalog file, using the
+                name in the request path, directly to the client."""
+
+                try:
+                        name = tokens[0]
+                except IndexError:
+                        raise cherrypy.HTTPError(httplib.FORBIDDEN,
+                            _("Directory listing not allowed."))
+
+                try:
+                        fpath = self._repo.catalog_1(name)
+                except repo.RepositoryCatalogNotFoundError, e:
+                        raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+                except repo.RepositoryError, e:
+                        # Treat any remaining repository error as a 404, but
+                        # log the error and include the real failure
+                        # information.
+                        cherrypy.log("Request failed: %s" % str(e))
+                        raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+                return serve_file(fpath, "text/plain")
+
+        catalog_1._cp_config = { "response.stream": True }
+
         def manifest_0(self, *tokens):
                 """The request is an encoded pkg FMRI.  If the version is
                 specified incompletely, we return an error, as the client is
                 expected to form correct requests based on its interpretation
                 of the catalog and its image policies."""
 
+                try:
+                        cat = self._repo.catalog
+                        pubs = cat.publishers()
+                except Exception, e:
+                        cherrypy.log("Request Failed: %s" % e)
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+
+                # A broken proxy (or client) has caused a fully-qualified FMRI
+                # to be split up.
+                comps = [t for t in tokens]
+                if comps[0] == "pkg:" and comps[1] in pubs:
+                        # Only one slash here as another will be added below.
+                        comps[0] += "/"
+
                 # Parse request into FMRI component and decode.
                 try:
                         # If more than one token (request path component) was
                         # specified, assume that the extra components are part
                         # of the fmri and have been split out because of bad
                         # proxy behaviour.
-                        pfmri = "/".join(tokens)
-                        fpath = self.__repo.manifest(pfmri)
+                        pfmri = "/".join(comps)
+                        fpath = self._repo.manifest(pfmri)
                 except (IndexError, repo.RepositoryInvalidFMRIError), e:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
                 except repo.RepositoryError, e:
@@ -468,7 +482,7 @@
                 is output directly to the client. """
 
                 try:
-                        self.scfg.inc_flist()
+                        self._repo.inc_flist()
 
                         # Create a dummy file object that hooks to the write()
                         # callable which is all tarfile needs to output the
@@ -495,7 +509,7 @@
 
                         for v in params.values():
                                 filepath = os.path.normpath(os.path.join(
-                                    self.scfg.file_root,
+                                    self._repo.file_root,
                                     misc.hash_file_name(v)))
 
                                 # If file isn't here, skip it
@@ -503,8 +517,7 @@
                                         continue
 
                                 tar_stream.add(filepath, v, False)
-
-                                self.scfg.inc_flist_files()
+                                self._repo.inc_flist_files()
 
                         # Flush the remaining bytes to the client.
                         tar_stream.close()
@@ -542,7 +555,7 @@
                         fhash = None
 
                 try:
-                        fpath = self.__repo.file(fhash)
+                        fpath = self._repo.file(fhash)
                 except repo.RepositoryFileNotFoundError, e:
                         raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
                 except repo.RepositoryError, e:
@@ -573,7 +586,7 @@
                 # signed certificate (or a more elaborate system).
 
                 try:
-                        trans_id = self.__repo.open(client_release, pfmri)
+                        trans_id = self._repo.open(client_release, pfmri)
                         response.headers["Content-type"] = "text/plain"
                         response.headers["Transaction-ID"] = trans_id
                 except repo.RepositoryError, e:
@@ -614,7 +627,7 @@
                             "X-IPkg-Refresh-Index: %s" % e)
 
                 try:
-                        pfmri, pstate = self.__repo.close(trans_id,
+                        pfmri, pstate = self._repo.close(trans_id,
                             refresh_index=refresh_index)
                 except repo.RepositoryError, e:
                         # Assume a bad request was made.  A 404 can't be
@@ -638,7 +651,7 @@
                         trans_id = None
 
                 try:
-                        self.__repo.abandon(trans_id)
+                        self._repo.abandon(trans_id)
                 except repo.RepositoryError, e:
                         # Assume a bad request was made.  A 404 can't be
                         # returned here as misc.versioned_urlopen will interpret
@@ -695,7 +708,7 @@
                 #         self.critical = True
 
                 try:
-                        self.__repo.add(trans_id, action)
+                        self._repo.add(trans_id, action)
                 except repo.RepositoryError, e:
                         # Assume a bad request was made.  A 404 can't be
                         # returned here as misc.versioned_urlopen will interpret
@@ -720,7 +733,7 @@
 
                 if cmd == "refresh":
                         try:
-                                self.__repo.refresh_index()
+                                self._repo.refresh_index()
                         except repo.RepositoryError, e:
                                 # Assume a bad request was made.  A 404 can't be
                                 # returned here as misc.versioned_urlopen will interpret
@@ -741,13 +754,27 @@
                     on its interpretation of the catalog and its image
                     policies. """
 
+                try:
+                        cat = self._repo.catalog
+                        pubs = cat.publishers()
+                except Exception, e:
+                        cherrypy.log("Request Failed: %s" % e)
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+
+                # A broken proxy (or client) has caused a fully-qualified FMRI
+                # to be split up.
+                comps = [t for t in tokens]
+                if comps[0] == "pkg:" and comps[1] in pubs:
+                        # Only one slash here as another will be added below.
+                        comps[0] += "/"
+
                 # Parse request into FMRI component and decode.
                 try:
                         # If more than one token (request path component) was
                         # specified, assume that the extra components are part
                         # of the fmri and have been split out because of bad
                         # proxy behaviour.
-                        pfmri = "/".join(tokens)
+                        pfmri = "/".join(comps)
                 except IndexError:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST)
 
@@ -762,7 +789,8 @@
                 m.set_fmri(None, pfmri)
 
                 try:
-                        mpath = os.path.join(self.scfg.pkg_root, f.get_dir_path())
+                        mpath = os.path.join(self._repo.pkg_root,
+                            f.get_dir_path())
                 except fmri.FmriError, e:
                         # If the FMRI operation couldn't be performed, assume
                         # the client made a bad request.
@@ -775,7 +803,7 @@
 
                 publisher, name, ver = f.tuple()
                 if not publisher:
-                        publisher = self.rcfg.get_attribute("publisher",
+                        publisher = self._repo.cfg.get_property("publisher",
                             "prefix")
                         if not publisher:
                                 publisher = ""
@@ -788,7 +816,7 @@
                                 lsummary.write("\n")
 
                         lpath = os.path.normpath(os.path.join(
-                            self.scfg.file_root,
+                            self._repo.file_root,
                             misc.hash_file_name(entry.hash)))
 
                         lfile = file(lpath, "rb")
@@ -814,16 +842,17 @@
 
         def __get_publisher(self):
                 rargs = {}
-                for attr in ("collection_type", "description", "legal_uris",
+                for prop in ("collection_type", "description", "legal_uris",
                     "mirrors", "name", "origins", "refresh_seconds",
                     "registration_uri", "related_uris"):
-                        rargs[attr] = self.rcfg.get_attribute("repository",
-                            attr)
+                        rargs[prop] = self._repo.cfg.get_property(
+                            "repository", prop)
 
                 repo = publisher.Repository(**rargs)
-                alias = self.rcfg.get_attribute("publisher", "alias")
-                pfx = self.rcfg.get_attribute("publisher", "prefix")
-                return publisher.Publisher(pfx, alias=alias, repositories=[repo])
+                alias = self._repo.cfg.get_property("publisher", "alias")
+                pfx = self._repo.cfg.get_property("publisher", "prefix")
+                return publisher.Publisher(pfx, alias=alias,
+                    repositories=[repo])
 
         @cherrypy.tools.response_headers(headers=[(
             "Content-Type", p5i.MIME_TYPE)])
@@ -861,11 +890,25 @@
                 datastream as provided."""
 
                 try:
+                        cat = self._repo.catalog
+                        pubs = cat.publishers()
+                except Exception, e:
+                        cherrypy.log("Request Failed: %s" % e)
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+
+                # A broken proxy (or client) has caused a fully-qualified FMRI
+                # to be split up.
+                comps = [t for t in tokens]
+                if comps[0] == "pkg:" and comps[1] in pubs:
+                        # Only one slash here as another will be added below.
+                        comps[0] += "/"
+
+                try:
                         # If more than one token (request path component) was
                         # specified, assume that the extra components are part
                         # of an FMRI and have been split out because of bad
                         # proxy behaviour.
-                        pfmri = "/".join(tokens)
+                        pfmri = "/".join(comps)
                 except IndexError:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST)
 
@@ -892,11 +935,11 @@
 
                 # Attempt to find matching entries in the catalog.
                 try:
-                        cat = self.scfg.catalog
-                        matches = catalog.extract_matching_fmris(cat.fmris(),
+                        matches, unmatched = catalog.extract_matching_fmris(cat.fmris(),
                             patterns=[pfmri], constraint=version.CONSTRAINT_AUTO,
                             matcher=matcher)
                 except Exception, e:
+                        cherrypy.log("Request Failed: %s" % e)
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
                 if not matches:
@@ -941,21 +984,16 @@
         """A class that creates a depot that misbehaves.  Naughty
         depots are useful for testing."""
 
-
-        def __init__(self, scfg, cfgpathname=None):
-                """Include config in scfg, and cfgpathname, if needed."""
+        def __init__(self, repo, content_root):
+                """Initialize."""
 
-                DepotHTTP.__init__(self, scfg, cfgpathname)
-
-                self.__repo = repo.NastyRepository(scfg, cfgpathname)
-                self.rcfg = self.__repo.rcfg
-                self.scfg = self.__repo.scfg
+                DepotHTTP.__init__(self, repo, content_root)
 
                 # Handles the BUI (Browser User Interface).
-                face.init(scfg, self.rcfg)
+                face.init(self._repo, self.web_root)
 
                 # Store any possible configuration changes.
-                self.__repo.write_config()
+                self._repo.write_config()
 
                 self.requested_files = []
 
@@ -973,50 +1011,30 @@
 
                 # NASTY
                 # emit error code that client should know how to retry
-                if cherrypy.request.app.root.scfg.need_nasty_bonus(bonus):
+                if cherrypy.request.app.root._repo.cfg.need_nasty_bonus(bonus):
                         code = retryable_errors[random.randint(0,
                             len(retryable_errors) - 1)]
                         raise cherrypy.HTTPError(code)
 
         # Override _cp_config for catalog_0 operation
         def catalog_0(self, *tokens):
-                """Provide an incremental update or full version of the
-                catalog, as appropriate, to the requesting client."""
+                """Provide a full version of the catalog, as appropriate, to
+                the requesting client.  Incremental catalogs are not supported
+                for v0 catalog clients."""
 
                 request = cherrypy.request
 
+                # Response headers have to be setup *outside* of the function
+                # that yields the catalog content.
+                c = self._repo.catalog
                 response = cherrypy.response
                 response.headers["Content-type"] = "text/plain"
-                response.headers["Last-Modified"] = \
-                    self.scfg.catalog.last_modified()
-
-                lm = request.headers.get("If-Modified-Since", None)
-                if lm is not None:
-                        try:
-                                lm = catalog.ts_to_datetime(lm)
-                        except ValueError:
-                                lm = None
-                        else:
-                                if not self.scfg.updatelog.enough_history(lm):
-                                        # Ignore incremental requests if there
-                                        # isn't enough history to provide one.
-                                        lm = None
-                                elif self.scfg.updatelog.up_to_date(lm):
-                                        response.status = httplib.NOT_MODIFIED
-                                        return
-
-                if lm:
-                        # If a last modified date and time was provided, then an
-                        # incremental update is being requested.
-                        response.headers["X-Catalog-Type"] = "incremental"
-                else:
-                        response.headers["X-Catalog-Type"] = "full"
-                        response.headers["Content-Length"] = str(
-                            self.scfg.catalog.size())
+                response.headers["Last-Modified"] = c.last_modified.isoformat()
+                response.headers["X-Catalog-Type"] = "full"
 
                 def output():
                         try:
-                                for l in self.__repo.catalog(lm):
+                                for l in self._repo.catalog_0():
                                         yield l
                         except repo.RepositoryError, e:
                                 # Can't do anything in a streaming generator
@@ -1036,14 +1054,28 @@
                 expected to form correct requests based on its interpretation
                 of the catalog and its image policies."""
 
+                try:
+                        cat = self._repo.catalog
+                        pubs = cat.publishers()
+                except Exception, e:
+                        cherrypy.log("Request Failed: %s" % e)
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+
+                # A broken proxy (or client) has caused a fully-qualified FMRI
+                # to be split up.
+                comps = [t for t in tokens]
+                if comps[0] == "pkg:" and comps[1] in pubs:
+                        # Only one slash here as another will be added below.
+                        comps[0] += "/"
+
                 # Parse request into FMRI component and decode.
                 try:
                         # If more than one token (request path component) was
                         # specified, assume that the extra components are part
                         # of the fmri and have been split out because of bad
                         # proxy behaviour.
-                        pfmri = "/".join(tokens)
-                        fpath = self.__repo.manifest(pfmri)
+                        pfmri = "/".join(comps)
+                        fpath = self._repo.manifest(pfmri)
                 except (IndexError, repo.RepositoryInvalidFMRIError), e:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
                 except repo.RepositoryError, e:
@@ -1055,12 +1087,12 @@
 
                 # NASTY
                 # Send an error before serving the file, perhaps
-                if self.scfg.need_nasty():
+                if self._repo.cfg.need_nasty():
                         self.nasty_retryable_error()
-                elif self.scfg.need_nasty_infrequently():
+                elif self._repo.cfg.need_nasty_infrequently():
                         # Fall asleep before finishing the request
                         time.sleep(35)
-                elif self.scfg.need_nasty_rarely():
+                elif self._repo.cfg.need_nasty_rarely():
                         # Forget that the manifest is here
                         raise cherrypy.HTTPError(httplib.NOT_FOUND)
 
@@ -1076,10 +1108,10 @@
                 is output directly to the client. """
 
                 try:
-                        self.scfg.inc_flist()
+                        self._repo.inc_flist()
 
                         # NASTY
-                        if self.scfg.need_nasty_occasionally():
+                        if self._repo.cfg.need_nasty_occasionally():
                                 return
 
                         # Create a dummy file object that hooks to the write()
@@ -1106,7 +1138,7 @@
                             self._tar_stream_close, failsafe = True)
 
                         # NASTY
-                        if self.scfg.need_nasty_infrequently():
+                        if self._repo.cfg.need_nasty_infrequently():
                                 time.sleep(35)
 
                         for v in params.values():
@@ -1121,18 +1153,18 @@
                                         self.requested_files.append(v)
 
                                 # NASTY
-                                if self.scfg.need_nasty_infrequently():
+                                if self._repo.cfg.need_nasty_infrequently():
                                         # Give up early
                                         break
-                                elif self.scfg.need_nasty_infrequently():
+                                elif self._repo.cfg.need_nasty_infrequently():
                                         # Skip this file
                                         continue
-                                elif self.scfg.need_nasty_rarely():
+                                elif self._repo.cfg.need_nasty_rarely():
                                         # Take a nap
                                         time.sleep(35)
 
                                 filepath = os.path.normpath(os.path.join(
-                                    self.scfg.file_root,
+                                    self._repo.file_root,
                                     misc.hash_file_name(v)))
 
                                 # If file isn't here, skip it
@@ -1141,38 +1173,37 @@
 
                                 # NASTY
                                 # Send a file with the wrong content
-                                if self.scfg.need_nasty_rarely():
+                                if self._repo.cfg.need_nasty_rarely():
                                         pick = random.randint(0,
                                             len(self.requested_files) - 1)
                                         badfn = self.requested_files[pick]
                                         badpath = os.path.normpath(
-                                            os.path.join(self.scfg.file_root,
+                                            os.path.join(self._repo.file_root,
                                             misc.hash_file_name(badfn)))
 
                                         tar_stream.add(badpath, v, False)
                                 else:
                                         tar_stream.add(filepath, v, False)
 
-                                self.scfg.inc_flist_files()
+                                self._repo.inc_flist_files()
 
                         # NASTY
                         # Write garbage into the stream
-                        if self.scfg.need_nasty_infrequently():
+                        if self._repo.cfg.need_nasty_infrequently():
                                 f.write("NASTY!")
 
                         # NASTY
                         # Send an extraneous file
-                        if self.scfg.need_nasty_infrequently():
+                        if self._repo.cfg.need_nasty_infrequently():
                                 pick = random.randint(0,
                                     len(self.requested_files) - 1)
                                 extrafn = self.requested_files[pick]
                                 extrapath = os.path.normpath(
-                                    os.path.join(self.scfg.file_root,
+                                    os.path.join(self._repo.file_root,
                                     misc.hash_file_name(extrafn)))
 
                                 tar_stream.add(extrapath, extrafn, False)
 
-
                         # Flush the remaining bytes to the client.
                         tar_stream.close()
                         cherrypy.request.tar_stream = None
@@ -1210,7 +1241,7 @@
                         fhash = None
 
                 try:
-                        fpath = self.__repo.file(fhash)
+                        fpath = self._repo.file(fhash)
                 except repo.RepositoryFileNotFoundError, e:
                         raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
                 except repo.RepositoryError, e:
@@ -1222,8 +1253,7 @@
 
                 # NASTY
                 # Stash filename for later use.
-                # Toss out the list if it's larger than 1024
-                # items.
+                # Toss out the list if it's larger than 1024 items.
                 if len(self.requested_files) > 1024:
                         self.requested_files = [fhash]
                 else:
@@ -1231,22 +1261,22 @@
 
                 # NASTY
                 # Send an error before serving the file, perhaps
-                if self.scfg.need_nasty():
+                if self._repo.cfg.need_nasty():
                         self.nasty_retryable_error()
-                elif self.scfg.need_nasty_rarely():
+                elif self._repo.cfg.need_nasty_rarely():
                         # Fall asleep before finishing the request
                         time.sleep(35)
-                elif self.scfg.need_nasty_rarely():
+                elif self._repo.cfg.need_nasty_rarely():
                         # Forget that the manifest is here
                         raise cherrypy.HTTPError(httplib.NOT_FOUND)
 
                 # NASTY
                 # Send the wrong file
-                if self.scfg.need_nasty_rarely():
+                if self._repo.cfg.need_nasty_rarely():
                         pick = random.randint(0, len(self.requested_files) - 1)
                         badfn = self.requested_files[pick]
                         badpath = os.path.normpath(os.path.join(
-                            self.scfg.file_root, misc.hash_file_name(badfn)))
+                            self._repo.file_root, misc.hash_file_name(badfn)))
 
                         return serve_file(badpath, "application/data")
 
@@ -1267,13 +1297,13 @@
                 try:
                         fst = os.stat(filepath)
                         filesz = fst.st_size
-                        file = open(filepath, "rb")
+                        nfile = open(filepath, "rb")
                 except EnvironmentError:
                         raise cherrypy.HTTPError(httplib.NOT_FOUND)
 
                 # NASTY
                 # Send incorrect content length
-                if self.scfg.need_nasty_rarely():
+                if self._repo.cfg.need_nasty_rarely():
                         response.headers["Content-Length"] = str(filesz +
                                 random.randint(1, 1024))
                         already_nasty = True
@@ -1282,17 +1312,17 @@
 
                 # NASTY
                 # Send truncated file
-                if self.scfg.need_nasty_rarely() and not already_nasty:
-                        response.body = file.read(filesz - random.randint(1,
+                if self._repo.cfg.need_nasty_rarely() and not already_nasty:
+                        response.body = nfile.read(filesz - random.randint(1,
                             filesz - 1))
                         # If we're sending data, lie about the length and
                         # make the client catch us.
                         if content_type == "application/data":
                                 response.headers["Content-Length"] = str(
                                     len(response.body))
-                elif self.scfg.need_nasty_rarely() and not already_nasty:
+                elif self._repo.cfg.need_nasty_rarely() and not already_nasty:
                         # Write garbage into the response
-                        response.body = file.read(filesz)
+                        response.body = nfile.read(filesz)
                         response.body += "NASTY!"
                         # If we're sending data, lie about the length and
                         # make the client catch us.
@@ -1300,6 +1330,6 @@
                                 response.headers["Content-Length"] = str(
                                     len(response.body))
                 else:
-                        response.body = file.read(filesz)
+                        response.body = nfile.read(filesz)
 
                 return response.body
--- a/src/modules/server/errors.py	Thu Oct 22 19:13:06 2009 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-#!/usr/bin/python2.4
-#
-# CDDL HEADER START
-#
-# The contents of this file are subject to the terms of the
-# Common Development and Distribution License (the "License").
-# You may not use this file except in compliance with the License.
-#
-# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
-# or http://www.opensolaris.org/os/licensing.
-# See the License for the specific language governing permissions
-# and limitations under the License.
-#
-# When distributing Covered Code, include this CDDL HEADER in each
-# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
-# If applicable, add the following below this CDDL HEADER, with the
-# fields enclosed by brackets "[]" replaced with your own identifying
-# information: Portions Copyright [yyyy] [name of copyright owner]
-#
-# CDDL HEADER END
-#
-
-#
-# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
-#
-
-class SvrConfigError(Exception):
-        """Base exception class for all server configuration exceptions."""
-
-        def __init__(self, *args):
-                Exception.__init__(self, *args)
-                if args:
-                        self.data = args[0]
-                else:
-                        self.data = None
-
-        def __str__(self):
-                return str(self.data)
--- a/src/modules/server/face.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/server/face.py	Fri Oct 23 17:43:37 2009 -0500
@@ -26,11 +26,10 @@
 
 import cherrypy
 import cherrypy.lib.static
-import errno
 import httplib
 import os
 import pkg.server.api as api
-import pkg.server.api_errors as api_errors
+import pkg.server.api_errors as sae
 import pkg.server.feed
 import sys
 import urllib
@@ -45,30 +44,28 @@
         sys.exit(2)
 
 tlookup = None
-def init(scfg, rcfg):
-        """Ensure that the BUI is properly initialized.
-        """
+def init(repo, web_root):
+        """Ensure that the BUI is properly initialized."""
         global tlookup
-        pkg.server.feed.init(scfg, rcfg)
-        tlookup = mako.lookup.TemplateLookup(directories=[
-            scfg.web_root
-            ])
+        pkg.server.feed.init(repo)
+        tlookup = mako.lookup.TemplateLookup(directories=[web_root])
 
-def feed(scfg, rcfg, request, response):
-        if scfg.is_mirror():
+def feed(repo, request, response):
+        if repo.mirror:
                 raise cherrypy.HTTPError(httplib.NOT_FOUND,
                     "Operation not supported in current server mode.")
-        if not scfg.updatelog.last_update:
+        if not repo.catalog.updates:
                 raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
                     "No update history; unable to generate feed.")
-        return pkg.server.feed.handle(scfg, rcfg, request, response)
+        return pkg.server.feed.handle(repo, request, response)
 
-def __render_template(request, scfg, rcfg, path):
+def __render_template(repo, content_root, web_root, request, path):
         template = tlookup.get_template(path)
-        base = api.BaseInterface(request, scfg, rcfg)
+        base = api.BaseInterface(request, repo, content_root=content_root,
+            web_root=web_root)
         return template.render_unicode(g_vars={ "base": base })
 
-def __handle_error(request, path, error):
+def __handle_error(path, error):
         # All errors are treated as a 404 since reverse proxies such as Apache
         # don't handle 500 errors in a desirable way.  For any error but a 404,
         # an error is logged.
@@ -78,36 +75,44 @@
 
         raise cherrypy.NotFound()
 
-def respond(scfg, rcfg, request, response, *tokens, **params):
+def respond(repo, content_root, web_root, request, response):
         path = request.path_info.strip("/")
         if path == "":
                 path = "index.shtml"
         elif path.split("/")[0] == "feed":
-                return feed(scfg, rcfg, request, response)
+                return feed(repo, request, response)
 
         if not path.endswith(".shtml"):
                 spath = urllib.unquote(path)
-                fname = os.path.join(scfg.web_root, spath)
-                if not os.path.normpath(fname).startswith(os.path.normpath(
-                    scfg.web_root)):
+                fname = os.path.join(web_root, spath)
+                if not os.path.normpath(fname).startswith(
+                    os.path.normpath(web_root)):
                         # Ignore requests for files outside of the web root.
-                        return __handle_error(request, path, httplib.NOT_FOUND)
+                        return __handle_error(path, httplib.NOT_FOUND)
                 else:
                         return cherrypy.lib.static.serve_file(os.path.join(
-                            scfg.web_root, spath))
+                            web_root, spath))
 
         try:
-                return __render_template(request, scfg, rcfg, path)
+                return __render_template(repo, content_root, web_root, request,
+                    path)
+        except sae.VersionException, e:
+                # The user shouldn't see why we can't render a template, but
+                # the reason should be logged (cleanly).
+                cherrypy.log("Template '%(path)s' is incompatible with current "
+                    "server api: %(error)s" % { "path": path,
+                    "error": str(e) })
+                cherrypy.log("Ensure that the correct --content-root has been "
+                    "provided to pkg.depotd.")
+                return __handle_error(request.path_info, httplib.NOT_FOUND)
         except IOError, e:
-                return __handle_error(request, path,
-                    httplib.INTERNAL_SERVER_ERROR)
+                return __handle_error(path, httplib.INTERNAL_SERVER_ERROR)
         except mako.exceptions.TemplateLookupException, e:
                 # The above exception indicates that mako could not locate the
                 # template (in most cases, Mako doesn't seem to always clearly
                 # differentiate).
-                return __handle_error(request, path, httplib.NOT_FOUND)
-        except api_errors.RedirectException, e:
+                return __handle_error(path, httplib.NOT_FOUND)
+        except sae.RedirectException, e:
                 raise cherrypy.HTTPRedirect(e.data)
         except:
-                return __handle_error(request, path,
-                    httplib.INTERNAL_SERVER_ERROR)
+                return __handle_error(path, httplib.INTERNAL_SERVER_ERROR)
--- a/src/modules/server/feed.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/server/feed.py	Fri Oct 23 17:43:37 2009 -0500
@@ -30,6 +30,7 @@
 
 import cherrypy
 from cherrypy.lib.static import serve_file
+import copy
 import cStringIO
 import datetime
 import httplib
@@ -40,11 +41,10 @@
 import xml.dom.minidom as xmini
 
 from pkg.misc import get_rel_path
-import pkg.server.catalog as catalog
-import pkg.fmri as fmri
+import pkg.catalog as catalog
 import pkg.Uuid25 as uuid
 
-MIME_TYPE = 'application/atom+xml'
+MIME_TYPE = "application/atom+xml"
 CACHE_FILENAME = "feed.xml"
 RFC3339_FMT = "%Y-%m-%dT%H:%M:%SZ"
 
@@ -54,65 +54,39 @@
         """
         return ts.strftime(RFC3339_FMT)
 
-def rfc3339_str_to_ts(ts_str):
-        """Returns a timestamp representing 'ts_str', which should be in the
-        format specified by RFC 3339.
-        """
-        return time.mktime(time.strptime(ts_str, RFC3339_FMT))
-
 def rfc3339_str_to_dt(ts_str):
         """Returns a datetime object representing 'ts_str', which should be in
         the format specified by RFC 3339.
         """
         return datetime.datetime(*time.strptime(ts_str, RFC3339_FMT)[0:6])
 
-def ults_to_ts(ts_str):
-        """Returns a timestamp representing 'ts_str', which should be in
-        updatelog format.
-        """
-        # Python doesn't support fractional seconds for strptime.
-        ts_str = ts_str.split('.')[0]
-        # Currently, updatelog entries are in local time, not UTC.
-        return time.mktime(time.strptime(ts_str, "%Y-%m-%dT%H:%M:%S"))
-
-def ults_to_rfc3339_str(ts_str):
-        """Returns a timestamp representing 'ts_str', which should be in
-        updatelog format.
-        """
-        ltime = ults_to_ts(ts_str)
-        # Currently, updatelog entries are in local time, not UTC.
-        return dt_to_rfc3339_str(datetime.datetime(
-            *time.gmtime(ltime)[0:6]))
-
-def fmri_to_taguri(rcfg, f):
+def fmri_to_taguri(f):
         """Generates a 'tag' uri compliant with RFC 4151.  Visit
         http://www.taguri.org/ for more information.
         """
-        pfx = rcfg.get_attribute("publisher", "prefix")
-        if not pfx:
-                pfx = "unknown"
-        return "tag:%s,%s:%s" % (pfx, f.get_timestamp().strftime("%Y-%m-%d"),
+        return "tag:%s,%s:%s" % (f.publisher,
+            f.get_timestamp().strftime("%Y-%m-%d"),
             urllib.unquote(f.get_url_path()))
 
-def init(scfg, rcfg):
+def init(repo):
         """This function performs general initialization work that is needed
         for feeds to work correctly.
         """
 
-        if not scfg.feed_cache_read_only():
+        if not (repo.read_only and not repo.writable_root):
                 # RSS/Atom feeds require a unique identifier, so
                 # generate one if isn't defined already.  This
                 # needs to be a persistent value, so we only
                 # generate this if we can save the configuration.
-                fid = rcfg.get_attribute("feed", "id")
+                fid = repo.cfg.get_property("feed", "id")
                 if not fid:
                         # Create a random UUID (type 4).
-                        rcfg._set_attribute("feed", "id", uuid.uuid4())
+                        repo.cfg._set_property("feed", "id", uuid.uuid4())
 
                 # Ensure any configuration changes are reflected in the feed.
-                __clear_cache(scfg)
+                __clear_cache(repo)
 
-def set_title(request, rcfg, doc, feed, update_ts):
+def set_title(repo, doc, feed, update_ts):
         """This function attaches the necessary RSS/Atom feed elements needed
         to provide title, author and contact information to the provided
         xmini document object using the provided feed object and update
@@ -121,7 +95,7 @@
 
         t = doc.createElement("title")
         ti = xmini.Text()
-        ti.replaceWholeText(rcfg.get_attribute("feed", "name"))
+        ti.replaceWholeText(repo.cfg.get_property("feed", "name"))
         t.appendChild(ti)
         feed.appendChild(t)
 
@@ -134,7 +108,8 @@
         # identifier.
         i = doc.createElement("id")
         it = xmini.Text()
-        it.replaceWholeText("urn:uuid:%s" % rcfg.get_attribute("feed", "id"))
+        it.replaceWholeText("urn:uuid:%s" % repo.cfg.get_property("feed",
+            "id"))
         i.appendChild(it)
         feed.appendChild(i)
 
@@ -148,18 +123,18 @@
         # Add our icon.
         i = doc.createElement("icon")
         it = xmini.Text()
-        it.replaceWholeText(rcfg.get_attribute("feed", "icon"))
+        it.replaceWholeText(repo.cfg.get_property("feed", "icon"))
         i.appendChild(it)
         feed.appendChild(i)
 
         # Add our logo.
         l = doc.createElement("logo")
         lt = xmini.Text()
-        lt.replaceWholeText(rcfg.get_attribute("feed", "logo"))
+        lt.replaceWholeText(repo.cfg.get_property("feed", "logo"))
         l.appendChild(lt)
         feed.appendChild(l)
 
-        maintainer = rcfg.get_attribute("repository", "maintainer")
+        maintainer = repo.cfg.get_property("repository", "maintainer")
         # The author information isn't required, but can be useful.
         if maintainer:
                 name, email = rfc822.AddressList(maintainer).addresslist[0]
@@ -194,60 +169,50 @@
                 # Done with the author.
                 feed.appendChild(a)
 
-operations = {
-        "+": ["Added", "%s was added to the repository."],
-        "-": ["Removed", "%s was removed from the repository."],
-        "U": ["Updated", "%s, an update to an existing package, was added to "
-            "the repository."]
-}
+add_op = ("Added", "%s was added to the repository.")
+remove_op = ("Removed", "%s was removed from the repository.")
+update_op = ("Updated", "%s, a new version of an existing package, was added "
+    "to the repository.")
 
-def add_transaction(request, scfg, rcfg, doc, feed, txn, fmris):
+def add_transaction(request, doc, feed, entry, first):
         """Each transaction is an entry.  We have non-trivial content, so we
         can omit summary elements.
         """
 
         e = doc.createElement("entry")
 
-        tag, fmri_str = txn["catalog"].split()
-        f = fmri.PkgFmri(fmri_str)
+        pfmri, op_type, op_time, metadata = entry
  
         # Generate a 'tag' uri, to uniquely identify the entry, using the fmri.
         i = xmini.Text()
-        i.replaceWholeText(fmri_to_taguri(rcfg, f))
+        i.replaceWholeText(fmri_to_taguri(pfmri))
         eid = doc.createElement("id")
         eid.appendChild(i)
         e.appendChild(eid)
 
         # Attempt to determine the operation that was performed and generate
         # the entry title and content.
-        if txn["operation"] in operations:
-                op_title, op_content = operations[txn["operation"]]
+        if op_type == catalog.CatalogUpdate.ADD:
+                if pfmri != first:
+                        # XXX renaming, obsoletion?
+                        # If this fmri is not the same as the oldest one
+                        # for the FMRI's package stem, assume this is a
+                        # newer version of that package.
+                        op_title, op_content = update_op
+                else:
+                        op_title, op_content = add_op
+        elif op_type == catalog.CatalogUpdate.REMOVE:
+                op_title, op_content = remove_op
         else:
                 # XXX Better way to reflect an error?  (Aborting will make a
                 # non-well-formed document.)
                 op_title = "Unknown Operation"
                 op_content = "%s was changed in the repository."
 
-        if txn["operation"] == "+":
-                # Get all FMRIs matching the current FMRI's package name.
-                matches = fmris[f.pkg_name]
-                if len(matches["versions"]) > 1:
-                        # Get the oldest fmri.
-                        of = matches[str(matches["versions"][0])][0]
-
-                        # If the current fmri isn't the oldest one, then this
-                        # is an update to the package.
-                        if f != of:
-                                # If there is more than one matching FMRI, and
-                                # it isn't the same version as the oldest one,
-                                # we can assume that this is an update to an
-                                # existing package.
-                                op_title, op_content = operations["U"]
-
         # Now add a title for our entry.
         etitle = doc.createElement("title")
         ti = xmini.Text()
-        ti.replaceWholeText(" ".join([op_title, fmri_str]))
+        ti.replaceWholeText(" ".join([op_title, pfmri.get_pkg_stem()]))
         etitle.appendChild(ti)
         e.appendChild(etitle)
 
@@ -255,12 +220,12 @@
         # package was added).
         eu = doc.createElement("updated")
         ut = xmini.Text()
-        ut.replaceWholeText(ults_to_rfc3339_str(txn["timestamp"]))
+        ut.replaceWholeText(dt_to_rfc3339_str(op_time))
         eu.appendChild(ut)
         e.appendChild(eu)
 
         # Link to the info output for the given package FMRI.
-        e_uri = get_rel_path(request, 'info/0/%s' % f.get_url_path())
+        e_uri = get_rel_path(request, "info/0/%s" % urllib.quote(str(pfmri)))
 
         l = doc.createElement("link")
         l.setAttribute("rel", "alternate")
@@ -269,9 +234,7 @@
 
         # Using the description for the operation performed, add the FMRI and
         # tag information.
-        content_text = op_content % fmri_str
-        if tag == "C":
-                content_text += "  This version is tagged as critical."
+        content_text = op_content % pfmri
 
         co = xmini.Text()
         co.replaceWholeText(content_text)
@@ -281,50 +244,112 @@
 
         feed.appendChild(e)
 
-def update(request, scfg, rcfg, t, cf):
+def get_updates_needed(repo, ts):
+        """Returns a list of the CatalogUpdate files that contain the changes
+        that have been made to the catalog since the specified UTC datetime
+        object 'ts'."""
+
+        c = repo.catalog
+        if c.last_modified <= ts:
+                # No updates needed.
+                return []
+
+        updates = set()
+        for name, mdata in c.updates.iteritems():
+
+                # The last component of the update name is the locale.
+                locale = name.split(".", 2)[2]
+
+                # For now, only look at CatalogUpdates that are for the
+                # 'C' locale.  Any other CatalogUpdates just contain localized
+                # catalog data, so aren't currently interesting.
+                if locale != "C":
+                        continue
+
+                ulog_lm = mdata["last-modified"]
+                if ulog_lm <= ts:
+                        # CatalogUpdate hasn't changed since 'ts'.
+                        continue
+                updates.add(name)
+
+        if not updates:
+                # No updates needed.
+                return []
+
+        # Ensure updates are in chronological ascending order.
+        return sorted(updates)
+
+def update(request, repo, last, cf):
         """Generate new Atom document for current updates.  The cached feed
-        file is written to scfg.feed_cache_root/CACHE_FILENAME.
+        file is written to repo.feed_cache_root/CACHE_FILENAME.
         """
 
-        # Our configuration is stored in hours, convert it to seconds.
-        window_seconds = rcfg.get_attribute("feed", "window") * 60 * 60
-        feed_ts = datetime.datetime.fromtimestamp(t - window_seconds)
+        # Our configuration is stored in hours, convert it to days and seconds.
+        hours = repo.cfg.get_property("feed", "window")
+        days, hours = divmod(hours, 24)
+        seconds = hours * 60 * 60
+        feed_ts = last - datetime.timedelta(days=days, seconds=seconds)
 
         d = xmini.Document()
 
         feed = d.createElementNS("http://www.w3.org/2005/Atom", "feed")
         feed.setAttribute("xmlns", "http://www.w3.org/2005/Atom")
 
-        set_title(request, rcfg, d, feed, scfg.updatelog.last_update)
+        set_title(repo, d, feed, repo.catalog.last_modified)
 
         d.appendChild(feed)
 
-        # The feed should be presented in reverse chronological order.
-        def compare_ul_entries(a, b):
-                return cmp(ults_to_ts(a["timestamp"]),
-                    ults_to_ts(b["timestamp"]))
+        # Cache the first entry in the catalog for any given package stem found
+        # in the list of updates so that it can be used to quickly determine if
+        # the fmri in the update is a 'new' package or an update to an existing
+        # package.
+        c = repo.catalog
+
+        first = {}
+        def get_first(f):
+                stem = f.get_pkg_stem()
+                if stem in first:
+                        return first[stem]
+
+                for v, entries in c.entries_by_version(f.pkg_name):
+                        # The first version returned is the oldest version.
+                        # Add all of the unique package stems for that version
+                        # to the list.
+                        for efmri, edata in entries:
+                                first[efmri.get_pkg_stem()] = efmri
+                        break
 
-        # Get the entire catalog in the format returned by catalog.cache_fmri,
-        # so that we don't have to keep looking for possible matches.
-        fmris = {}
-        catalog.ServerCatalog.read_catalog(fmris,
-            scfg.updatelog.catalog.catalog_root)
+                if stem not in first:
+                        # A value of None is used to denote that no previous
+                        # version exists for this particular stem.  This could
+                        # happen when a prior version exists for a different
+                        # publisher, or no prior version exists at all.
+                        first[stem] = None
+                return first[stem]
 
-        for txn in sorted(scfg.updatelog.gen_updates_as_dictionaries(feed_ts),
-            cmp=compare_ul_entries, reverse=True):
-                add_transaction(request, scfg, rcfg, d, feed, txn, fmris)
+        # Updates should be presented in reverse chronological order.
+        for name in reversed(get_updates_needed(repo, feed_ts)):
+                ulog = catalog.CatalogUpdate(name, meta_root=c.meta_root)
+                for entry in ulog.updates():
+                        pfmri = entry[0]
+                        op_time = entry[2]
+                        if op_time <= feed_ts:
+                                # Exclude this particular update.
+                                continue
+                        add_transaction(request, d, feed, entry,
+                            get_first(pfmri))
 
         d.writexml(cf)
 
-def __get_cache_pathname(scfg):
-        return os.path.join(scfg.feed_cache_root, CACHE_FILENAME)
+def __get_cache_pathname(repo):
+        return os.path.join(repo.feed_cache_root, CACHE_FILENAME)
 
-def __clear_cache(scfg):
-        if scfg.feed_cache_read_only():
-                # Ignore the request due to server configuration.
+def __clear_cache(repo):
+        if repo.read_only and not repo.writable_root:
+                # Ignore the request due to repository configuration.
                 return
 
-        pathname = __get_cache_pathname(scfg)
+        pathname = __get_cache_pathname(repo)
         try:
                 if os.path.exists(pathname):
                         os.remove(pathname)
@@ -333,13 +358,13 @@
                     httplib.INTERNAL_SERVER_ERROR,
                     "Unable to clear feed cache.")
 
-def __cache_needs_update(scfg):
+def __cache_needs_update(repo):
         """Checks to see if the feed cache file exists and if it is still
         valid.  Returns False, None if the cache is valid or True, last
         where last is a timestamp representing when the cache was
         generated.
         """
-        cfpath = __get_cache_pathname(scfg)
+        cfpath = __get_cache_pathname(repo)
         last = None
         need_update = True
         if os.path.isfile(cfpath):
@@ -349,7 +374,7 @@
                         d = xmini.parse(cfpath)
                 except Exception:
                         d = None
-                        __clear_cache(scfg)
+                        __clear_cache(repo)
 
                 # Get the feed element and attempt to get the time we last
                 # generated the feed to determine whether we need to regenerate
@@ -367,49 +392,46 @@
                                         break
 
                         if utn:
-                                last_ts = rfc3339_str_to_dt(utn.nodeValue)
+                                last = rfc3339_str_to_dt(utn.nodeValue)
 
                                 # Since our feed cache and updatelog might have
                                 # been created within the same second, we need
                                 # to ignore small variances when determining
                                 # whether to update the feed cache.
-                                update_ts = scfg.updatelog.last_update.replace(
-                                    microsecond=0)
-
-                                if last_ts >= update_ts:
+                                up_ts = copy.copy(repo.catalog.last_modified)
+                                up_ts = up_ts.replace(microsecond=0)
+                                if last >= up_ts:
                                         need_update = False
-                                else:
-                                        last = rfc3339_str_to_ts(utn.nodeValue)
                         else:
-                                __clear_cache(scfg)
+                                __clear_cache(repo)
                 else:
-                        __clear_cache(scfg)
+                        __clear_cache(repo)
 
         return need_update, last
 
-def handle(scfg, rcfg, request, response):
+def handle(repo, request, response):
         """If there have been package updates since we last generated the feed,
         update the feed and send it to the client.  Otherwise, send them the
         cached copy if it is available.
         """
 
-        cfpath = __get_cache_pathname(scfg)
+        cfpath = __get_cache_pathname(repo)
 
         # First check to see if we already have a valid cache of the feed.
-        need_update, last = __cache_needs_update(scfg)
+        need_update, last = __cache_needs_update(repo)
 
         if need_update:
                 # Update always looks at feed.window seconds before the last
                 # update until "now."  If last is none, we want it to use "now"
                 # as its starting point.
                 if last is None:
-                        last = time.time()
+                        last = datetime.datetime.utcnow()
 
-                if scfg.feed_cache_read_only():
+                if repo.read_only and not repo.writable_root:
                         # If the server is operating in readonly mode, the
                         # feed will have to be generated every time.
                         cf = cStringIO.StringIO()
-                        update(request, scfg, rcfg, last, cf)
+                        update(request, repo, last, cf)
                         cf.seek(0)
                         buf = cf.read()
                         cf.close()
@@ -427,7 +449,7 @@
                         # If the server isn't operating in readonly mode, the
                         # feed can be generated and cached in inst_dir.
                         cf = file(cfpath, "w")
-                        update(request, scfg, rcfg, last, cf)
+                        update(request, repo, last, cf)
                         cf.close()
 
         return serve_file(cfpath, MIME_TYPE)
--- a/src/modules/server/repository.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/server/repository.py	Fri Oct 23 17:43:37 2009 -0500
@@ -22,15 +22,34 @@
 # Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
 # Use is subject to license terms.
 
+import datetime
 import errno
+import logging
 import os
+import os.path
+import shutil
+import signal
+import sys
+import tempfile
+import threading
+import urllib
 
+import pkg.actions as actions
+import pkg.catalog as catalog
+import pkg.client.api_errors as api_errors
 import pkg.fmri as fmri
+import pkg.indexer as indexer
+import pkg.manifest as manifest
+import pkg.portable as portable
 import pkg.misc as misc
-import pkg.server.catalog as catalog
+import pkg.pkgsubprocess as subprocess
+import pkg.search_errors as se
 import pkg.server.query_parser as query_p
 import pkg.server.repositoryconfig as rc
 import pkg.server.transaction as trans
+import pkg.version as version
+
+from pkg.misc import EmptyI, EmptyDict
 
 class RepositoryError(Exception):
         """Base exception class for all Repository exceptions."""
@@ -44,15 +63,13 @@
                 return str(self.data)
 
 
-class RepositoryCatalogNoUpdatesError(RepositoryError):
-        """Used to indicate that no updates are available for the catalog.  The
-        first argument should be the type of updates requested; the second
-        should be date the catalog was last modified."""
+class RepositoryCatalogNotFoundError(RepositoryError):
+        """Used to indicate that a file for the specified catalog name does not
+        exist."""
 
-        def __init__(self, *args):
-                RepositoryError.__init__(self, *args)
-                if args:
-                        self.last_modified = args[1]
+        def __str__(self):
+                return _("The specified catalog file '%s', could not be "
+                    "found.") % self.data
 
 
 class RepositoryFileNotFoundError(RepositoryError):
@@ -64,6 +81,15 @@
                     "hash name: '%s'.") % self.data
 
 
+class RepositoryInvalidError(RepositoryError):
+        """Used to indicate that a valid repository could not be found at the
+        specified location."""
+
+        def __str__(self):
+                return _("The specified repository root '%s' does not contain "
+                    "a valid repository.") % self.data
+
+
 class RepositoryInvalidFMRIError(RepositoryError):
         """Used to indicate that the FMRI provided is invalid."""
 
@@ -84,15 +110,21 @@
                     self.data
 
 
-class RepositoryRenameFailureError(RepositoryError):
-        """Used to indicate that the rename could not be performed.  The first
-        argument should be the object representing the duplicate FMRI."""
+class RepositoryMirrorError(RepositoryError):
+        """Used to indicate that the requested operation could not be performed
+        as the repository is in mirror mode."""
 
         def __str__(self):
-                return _("Unable to rename the request FMRI: '%s'; ensure that "
-                    "the source FMRI exists in the catalog and that the "
-                    "destination FMRI does not already exist in the "
-                    "catalog.") % self.data
+                return _("The requested operation cannot be performed when the "
+                    "repository is used in mirror mode.")
+
+
+class RepositoryReadOnlyError(RepositoryError):
+        """Used to indicate that the requested operation could not be performed
+        as the repository is currently read-only."""
+
+        def __str__(self):
+                return _("The repository is read-only and cannot be modified.")
 
 
 class RepositorySearchTokenError(RepositoryError):
@@ -114,20 +146,420 @@
                 return _("Search functionality is temporarily unavailable.")
 
 
+class RepositoryUpgradeError(RepositoryError):
+        """Used to indicate that the specified repository root cannot be used
+        as the catalog or format of it is an older version that needs to be
+        upgraded before use and cannot be."""
+
+        def __str__(self):
+                return _("The format of the repository or its contents needs "
+                    "to be upgraded before it can be used to serve package "
+                    "data.  However, it is currently read-only and cannot be "
+                    "upgraded.  If using pkg.depotd, please restart the server "
+                    "without read-only so that the repository can be upgraded.")
+
+
 class Repository(object):
         """A Repository object is a representation of data contained within a
         pkg(5) repository and an interface to manipulate it."""
 
-        def __init__(self, scfg, cfgpathname=None):
+        __catalog = None
+        __lock = None
+
+        def __init__(self, auto_create=False, catalog_root=None, 
+            cfgpathname=None, fork_allowed=False, index_root=None, log_obj=None,
+            mirror=False, pkg_root=None, properties=EmptyDict, read_only=False,
+            repo_root=None, trans_root=None, writable_root=None):
                 """Prepare the repository for use."""
 
+                # This lock is used to protect the repository from multiple
+                # threads modifying it at the same time.
+                self.__lock = threading.Lock()
+
+                self.auto_create = auto_create
+                self.cfg = None
                 self.cfgpathname = None
-                self.rcfg = None
-                self.scfg = scfg
-                self.__searching = False
-                self.load_config(cfgpathname)
+                self.fork_allowed = fork_allowed
+                self.log_obj = log_obj
+                self.mirror = mirror
+                self.read_only = read_only
+                self.__tmp_root = None
+
+                # Must be set before other roots.
+                self.repo_root = repo_root
+
+                # These are all overrides for the default values that setting
+                # repo_root will provide.  If a caller provides one of these,
+                # they are responsible for creating the corresponding path
+                # and setting its mode appropriately.
+                if catalog_root:
+                        self.catalog_root = catalog_root
+                if index_root:
+                        self.index_root = index_root
+                if pkg_root:
+                        self.pkg_root = pkg_root
+                if trans_root:
+                        self.trans_root = trans_root
+                
+                # Must be set before writable_root.
+                self.__required_dirs = [self.trans_root, self.file_root,
+                    self.pkg_root, self.catalog_root]
+
+                # Ideally, callers would just specify overrides for the feed
+                # cache root, index_root, etc.  But this must be set after all
+                # of the others above.
+                self.writable_root = writable_root
+
+                # Must be set after all other roots.
+                self.__optional_dirs = [self.index_root]
+
+                # Stats
+                self.catalog_requests = 0
+                self.manifest_requests = 0
+                self.file_requests = 0
+                self.flist_requests = 0
+                self.flist_files = 0
+                self.pkgs_renamed = 0
+
+                # The update_handle lock protects the update_handle variable.
+                # This allows update_handle to be checked and acted on in a
+                # consistent step, preventing the dropping of needed updates.
+                # The check at the top of refresh index should always be done
+                # prior to deciding to spin off a process for indexing as it
+                # prevents more than one indexing process being run at the same
+                # time.
+                self.__searchdb_update_handle_lock = threading.Lock()
+
+                if os.name == "posix" and self.fork_allowed:
+                        try:
+                                signal.signal(signal.SIGCHLD,
+                                    self._child_handler)
+                        except ValueError:
+                                self.__log("Tried to create signal handler in "
+                                    "a thread other than the main thread.")
+
+                self.__searchdb_update_handle = None
+                self.__search_available = False
+                self.__deferred_searchdb_updates = []
+                self.__deferred_searchdb_updates_lock = threading.Lock()
+                self.__refresh_again = False
+
+                # Initialize.
+                self.__lock_repository()
+                try:
+                        self.__init_config(cfgpathname=cfgpathname,
+                            properties=properties)
+                        self.__init_dirs()
+                        self.__init_state()
+                finally:
+                        self.__unlock_repository()
+
+        def _child_handler(self, sig, frame):
+                """ Handler method for the SIGCHLD signal.  Checks to see if the
+                search database update child has finished, and enables searching
+                if it finished successfully, or logs an error if it didn't.
+                """
+
+                try:
+                        signal.signal(signal.SIGCHLD, self._child_handler)
+                except ValueError:
+                        self.__log("Tried to create signal handler in a thread "
+                            "other than the main thread.")
+
+                # If there's no update_handle, then another subprocess was
+                # spun off and that was what finished. If the poll() returns
+                # None, then while the indexer was running, another process
+                # that was spun off finished.
+                rval = None
+                if not self.__searchdb_update_handle:
+                        return
+                rval = self.__searchdb_update_handle.poll()
+                if rval == None:
+                        return
+
+                if rval == 0:
+                        self.__search_available = True
+                        self.__index_log("Search indexes updated and "
+                            "available.")
+                        # Need to acquire this lock to prevent the possibility
+                        # of a race condition with refresh_index where a needed
+                        # refresh is dropped. It is possible that an extra
+                        # refresh will be done with this code, but that refresh
+                        # should be very quick to finish.
+                        self.__searchdb_update_handle_lock.acquire()
+                        self.__searchdb_update_handle = None
+                        self.__searchdb_update_handle_lock.release()
+
+                        if self.__refresh_again:
+                                self.__refresh_again = False
+                                self.refresh_index()
+                elif rval > 0:
+                        # If the refresh of the index failed, defensively
+                        # declare that search is unavailable.
+                        self.__index_log("ERROR building search database, exit "
+                            "code: %s" % rval)
+                        try:
+                                self.__log(
+                                    self.__searchdb_update_handle.stderr.read())
+                                self.__searchdb_update_handle.stderr.read()
+                        except KeyboardInterrupt:
+                                raise
+                        except:
+                                pass
+                        self.__searchdb_update_handle_lock.acquire()
+                        self.__searchdb_update_handle = None
+                        self.__searchdb_update_handle_lock.release()
+
+        def __mkdtemp(self):
+                """Create a temp directory under repository directory for
+                various purposes."""
+
+                root = self.repo_root
+                if self.writable_root:
+                        root = self.writable_root
+
+                tempdir = os.path.normpath(os.path.join(root, "tmp"))
+                try:
+                        if not os.path.exists(tempdir):
+                                os.makedirs(tempdir)
+                        return tempfile.mkdtemp(dir=tempdir)
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise api_errors.PermissionsException(
+                                    e.filename)
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
+                        raise
+
+        def __upgrade(self):
+                """Upgrades the repository's format and contents if needed."""
+
+                def get_file_lm(pathname):
+                        try:
+                                mod_time = os.stat(pathname).st_mtime
+                        except EnvironmentError, e:
+                                if e.errno == errno.ENOENT:
+                                        return None
+                                raise
+                        return datetime.datetime.utcfromtimestamp(mod_time)
+
+                # To determine if an upgrade is needed, first check for a v0
+                # catalog attrs file.
+                need_upgrade = False
+                v0_attrs = os.path.join(self.catalog_root, "attrs")
+
+                # The only place a v1 catalog should exist, at all,
+                # is either in self.catalog_root, or in a subdirectory
+                # of self.writable_root if a v0 catalog exists.
+                v1_cat = None
+                writ_cat_root = None
+                if self.writable_root:
+                        writ_cat_root = os.path.join(
+                            self.writable_root, "catalog")
+                        v1_cat = catalog.Catalog(
+                            meta_root=writ_cat_root, read_only=True)
+
+                v0_lm = None
+                if os.path.exists(v0_attrs):
+                        # If a v0 catalog exists, then assume any existing v1
+                        # catalog needs to be kept in sync if it exists.  If
+                        # one doesn't exist, then it needs to be created.
+                        v0_lm = get_file_lm(v0_attrs)
 
-        def load_config(self, cfgpathname=None):
+                        if v1_cat and v1_cat.exists:
+                                if v0_lm != v1_cat.last_modified:
+                                        # If the old v0 catalog has changed then
+                                        # the v1 catalog needs to be rebuilt.
+                                        need_upgrade = True
+                        else:
+                                # If a v0 catalog exists and a v1 doesn't exist,
+                                # an upgrade is needed.
+                                need_upgrade = True
+                elif v1_cat:
+                        # A v1 catalog exists in the writable_root, but no v0
+                        # catalog exists.  This is likely from a v0 repository
+                        # that has since been upgraded to v1 and is now being
+                        # run read-only again with a writable root.  Assume that
+                        # the v1 catalog in the writable root should not be used
+                        # and destroy it.
+                        v1_cat.destroy()
+                        shutil.rmtree(writ_cat_root, True)
+                        writ_cat_root = None
+                        v1_cat = None
+
+                if v1_cat and writ_cat_root:
+                        # The catalog lives in the writable_root.
+                        self.catalog_root = writ_cat_root
+
+                if not need_upgrade or self.mirror:
+                        # If an upgrade isn't needed, or this is a mirror, then
+                        # nothing should be done to the existing catalog data.
+                        return
+
+                if self.read_only and not self.writable_root:
+                        # Any further operations would attempt to alter the
+                        # existing catalog data, which can't be done due to
+                        # read_only status.
+                        raise RepositoryUpgradeError()
+
+                if self.catalog.exists:
+                        # v1 catalog should be destroyed if it exists already.
+                        self.catalog.destroy()
+                elif writ_cat_root and not os.path.exists(writ_cat_root):
+                        try:
+                                os.mkdir(writ_cat_root, 0755)
+                        except EnvironmentError, e:
+                                if e.errno == errno.EACCES:
+                                        raise api_errors.PermissionsException(
+                                            e.filename)
+                                if e.errno == errno.EROFS:
+                                        raise api_errors.ReadOnlyFileSystemException(
+                                            e.filename)
+                                raise
+
+                # To upgrade the repository, the catalog will have to
+                # be rebuilt.
+                self.__log(_("Upgrading repository; this process will "
+                    "take some time."))
+                self.__rebuild(default_pub=self.cfg.get_property(
+                    "publisher", "prefix"), lm=v0_lm)
+
+                if not self.read_only:
+                        v0_cat = os.path.join(self.repo_root, "catalog",
+                            "catalog")
+                        for f in v0_attrs, v0_cat:
+                                if os.path.exists(f):
+                                        portable.remove(f)
+
+                        # If this fails, it doesn't really matter, but it should
+                        # be removed if possible.
+                        shutil.rmtree(os.path.join(self.repo_root, "updatelog"),
+                            True)
+
+        def __add_package(self, pfmri, manifest=None):
+                """Private version; caller responsible for repository
+                locking."""
+
+                if not manifest:
+                        manifest = self._get_manifest(pfmri)
+                c = self.catalog
+                c.add_package(pfmri, manifest=manifest)
+
+        def __check_search(self):
+                if not self.index_root:
+                        return
+
+                ind = indexer.Indexer(self.index_root,
+                    self._get_manifest, self.manifest,
+                    log=self.__index_log)
+                cie = False
+                try:
+                        cie = ind.check_index_existence()
+                except se.InconsistentIndexException:
+                        pass
+                if cie:
+                        self.__search_available = True
+                        self.__index_log("Search Available")
+
+        def __destroy_catalog(self):
+                """Destroy the catalog."""
+
+                self.__catalog = None
+                if os.path.exists(self.catalog_root):
+                        shutil.rmtree(self.catalog_root)
+
+        @staticmethod
+        def __fmri_from_path(pkg, ver):
+                """Helper method that takes the full path to the package
+                directory and the name of the manifest file, and returns an FMRI
+                constructed from the information in those components."""
+
+                v = version.Version(urllib.unquote(ver), None)
+                f = fmri.PkgFmri(urllib.unquote(os.path.basename(pkg)))
+                f.version = v
+                return f
+
+        def _get_manifest(self, pfmri):
+                """This function should be private; but is protected instead due
+                to its usage as a callback."""
+
+                mpath = self.manifest(pfmri)
+                m = manifest.Manifest()
+                try:
+                        f = open(mpath, "rb")
+                        content = f.read()
+                        f.close()
+                except EnvironmentError, e:
+                        if e.errno == errno.ENOENT:
+                                raise RepositoryManifestNotFoundError(
+                                    e.filename)
+                        raise
+                m.set_fmri(None, pfmri)
+                m.set_content(content, EmptyI)
+                return m
+
+        def __get_catalog_root(self):
+                return self.__catalog_root
+
+        def __get_repo_root(self):
+                return self.__repo_root
+
+        def __get_writable_root(self):
+                return self.__writable_root
+
+        def __index_log(self, msg):
+                return self.__log(msg, "INDEX")
+
+        def __init_config(self, cfgpathname=None, properties=EmptyDict):
+                """Private helper function to initialize configuration."""
+
+                # Load configuration information.
+                if not cfgpathname:
+                        cfgpathname = self.cfgpathname
+                self.__load_config(cfgpathname, properties=properties)
+
+                # Set any specified properties.
+                for section in properties:
+                        for prop, value in properties[section].iteritems():
+                                self.cfg.set_property(section, prop, value)
+
+                # Verify that all required configuration information is set.
+                self.cfg.validate()
+
+        def __init_dirs(self):
+                """Verify and instantiate repository directory structure."""
+                emsg = _("repository directories incomplete")
+                for d in self.__required_dirs + self.__optional_dirs:
+                        if self.auto_create or (self.writable_root and
+                            d.startswith(self.writable_root)):
+                                try:
+                                        os.makedirs(d)
+                                except EnvironmentError, e:
+                                        if e.errno in (errno.EACCES,
+                                            errno.EROFS):
+                                                emsg = _("repository "
+                                                    "directories not writeable "
+                                                    "by current user id or "
+                                                    "group and are incomplete")
+                                        elif e.errno != errno.EEXIST:
+                                                raise
+
+                for d in self.__required_dirs:
+                        if not os.path.exists(d):
+                                if self.auto_create:
+                                        raise RepositoryError(emsg)
+                                raise RepositoryInvalidError(self.repo_root)
+
+                searchdb_file = os.path.join(self.repo_root, "search")
+                for ext in ".pag", ".dir":
+                        try:
+                                os.unlink(searchdb_file + ext)
+                        except OSError:
+                                # If these can't be removed, it doesn't matter.
+                                continue
+
+        def __load_config(self, cfgpathname=None, properties=EmptyDict):
                 """Load stored configuration data and configure the repository
                 appropriately."""
 
@@ -135,36 +567,355 @@
 
                 # Now load our repository configuration / metadata.
                 if cfgpathname is None:
-                        cfgpathname = os.path.join(self.scfg.repo_root,
+                        cfgpathname = os.path.join(self.repo_root,
                             "cfg_cache")
                         default_cfg_path = True
 
                 # Create or load the repository configuration.
                 try:
-                        self.rcfg = rc.RepositoryConfig(pathname=cfgpathname)
+                        self.cfg = rc.RepositoryConfig(pathname=cfgpathname,
+                            properties=properties)
                 except RuntimeError:
                         if not default_cfg_path:
                                 raise
 
                         # If it doesn't exist, just create a new object, it will
                         # automatically be populated with sane defaults.
-                        self.rcfg = rc.RepositoryConfig()
+                        self.cfg = rc.RepositoryConfig()
 
                 self.cfgpathname = cfgpathname
 
-        def write_config(self):
+        def __load_in_flight(self):
+                """Walk trans_root, acquiring valid transaction IDs."""
+
+                if self.mirror:
+                        # Mirrors don't permit publication.
+                        return
+
+                self.__in_flight_trans = {}
+                for txn in os.walk(self.trans_root):
+                        if txn[0] == self.trans_root:
+                                continue
+                        t = trans.Transaction()
+                        t.reopen(self, txn[0])
+                        self.__in_flight_trans[t.get_basename()] = t
+
+        def __lock_repository(self):
+                """Locks the repository preventing multiple consumers from
+                modifying it during operations."""
+
+                # XXX need filesystem lock too?
+                self.__lock.acquire()
+
+        def __log(self, msg, context="", severity=logging.INFO):
+                if self.log_obj:
+                        self.log_obj.log(msg=msg, context=context,
+                            severity=severity)
+
+        def __rebuild(self, default_pub=None, lm=None):
+                """Private version; caller responsible for repository
+                locking."""
+
+                if self.read_only:
+                        # Temporarily mark catalog as not read-only so that it
+                        # can be modified.
+                        self.catalog.read_only = False
+
+                # Set batch_mode for catalog to speed up rebuild.
+                self.catalog.batch_mode = True
+
+                # Pointless to log incremental updates since a new catalog
+                # is being built.  This also helps speed up rebuild.
+                self.catalog.log_updates = False
+
+                def add_package(f):
+                        m = self._get_manifest(f)
+                        if "pkg.fmri" in m:
+                                f = fmri.PkgFmri(m["pkg.fmri"])
+                        elif default_pub and not f.publisher:
+                                f.publisher = default_pub
+                        self.__log(str(f))
+                        self.__add_package(f, manifest=m)
+
+                # XXX eschew os.walk in favor of another os.listdir here?
+                for pkg in os.walk(self.pkg_root):
+                        if pkg[0] == self.pkg_root:
+                                continue
+
+                        for e in os.listdir(pkg[0]):
+                                f = self.__fmri_from_path(pkg[0], e)
+                                try:
+                                        add_package(f)
+                                except actions.ActionError, e:
+                                        # Don't add packages with corrupt
+                                        # manifests to the catalog.
+                                        self.__log(_("Skipping %(fmri)s; "
+                                            "invalid manifest: %(error)s") % {
+                                            "fmri": f, "error": e })
+
+                # Private add_package doesn't automatically save catalog
+                # so that operations can be batched (there is significant
+                # overhead in writing the catalog).
+                self.catalog.batch_mode = False
+                self.catalog.log_updates = True
+                self.catalog.read_only = self.read_only
+                self.__save_catalog(lm=lm)
+
+        def __refresh_index(self):
+                """Private version; caller responsible for repository
+                locking."""
+
+                self.__searchdb_update_handle_lock.acquire()
+
+                if self.__searchdb_update_handle:
+                        self.__refresh_again = True
+                        self.__searchdb_update_handle_lock.release()
+                        return
+
+                cat = self.catalog
+
+                try:
+                        fmris_to_index = set(cat.fmris())
+
+                        indexer.Indexer.check_for_updates(self.index_root,
+                            fmris_to_index)
+
+                        pub = self.cfg.get_property("publisher", "prefix")
+                        if fmris_to_index:
+                                if os.name == "posix" and self.fork_allowed:
+                                        cmd = self.__whence(sys.argv[0])
+                                        args = (sys.executable, cmd,
+                                            "--refresh-index", "-d",
+                                            self.repo_root)
+                                        if pub:
+                                                args += ("--set-property",
+                                                    "publisher.prefix=%s" % pub)
+                                        if os.path.normpath(
+                                            self.index_root) != \
+                                            os.path.normpath(os.path.join(
+                                            self.repo_root, "index")):
+                                                writ, t = os.path.split(
+                                                    self.index_root)
+                                                args += ("--writable-root",
+                                                    writ)
+                                        if self.read_only:
+                                                args += ("--readonly",)
+                                        try:
+                                                self.__searchdb_update_handle = \
+                                                    subprocess.Popen(args,
+                                                    stderr=subprocess.STDOUT)
+                                        except Exception, e:
+                                                self.__log("Starting the "
+                                                    "indexing process failed: "
+                                                    "%s" % e)
+                                                raise
+                                else:
+                                        self.run_update_index()
+                        else:
+                                # Since there is nothing to index, setup
+                                # the index and declare search available.
+                                # We only log this if this represents
+                                # a change in status of the server.
+                                ind = indexer.Indexer(self.index_root,
+                                    self._get_manifest,
+                                    self.manifest,
+                                    log=self.__index_log)
+                                ind.setup()
+                                if not self.__search_available:
+                                        self.__index_log("Search Available")
+                                self.__search_available = True
+                finally:
+                        self.__searchdb_update_handle_lock.release()
+
+        def __init_state(self):
+                """Private version; caller responsible for repository
+                locking."""
+
+                # Discard current catalog information (it will be re-loaded
+                # when needed).
+                self.__catalog = None
+
+                # Load in-flight transaction information.
+                self.__load_in_flight()
+
+                # Ensure default configuration is written.
+                self.__write_config()
+
+                # Ensure repository state is current before attempting
+                # to load it.
+                self.__upgrade()
+
+                if self.mirror:
+                        # In mirror-mode, nothing else to do.
+                        return
+
+                # If no catalog exists on-disk yet, ensure an empty one does
+                # so that clients can discern that a repository has an
+                # empty catalog, as opposed to missing one entirely (which
+                # could easily happen with multiple origins).  This must be
+                # done before the search checks below.
+                if not self.read_only and not self.catalog.exists:
+                        self.catalog.save()
+
+                if not self.read_only or self.writable_root:
+                        try:
+                                try:
+                                        self.__refresh_index()
+                                except se.InconsistentIndexException, e:
+                                        s = _("Index corrupted or out of date. "
+                                            "Removing old index directory (%s) "
+                                            " and rebuilding search "
+                                            "indexes.") % e.cause
+                                        self.__log(s, "INDEX")
+                                        shutil.rmtree(self.index_root)
+                                        try:
+                                                self.__refresh_index()
+                                        except se.IndexingException, e:
+                                                self.__log(str(e), "INDEX")
+                                except se.IndexingException, e:
+                                        self.__log(str(e), "INDEX")
+                        except EnvironmentError, e:
+                                if e.errno in (errno.EACCES, errno.EROFS):
+                                        if self.writable_root:
+                                                raise RepositoryError(
+                                                    _("writable root not "
+                                                    "writable by current user "
+                                                    "id or group."))
+                                        raise RepositoryError(_("unable to "
+                                            "write to index directory."))
+                                raise
+                else:
+                        self.__check_search()
+
+        def __save_catalog(self, lm=None):
+                """Private helper function that attempts to save the catalog in
+                an atomic fashion."""
+
+                # Ensure new catalog is created in a temporary location so that
+                # it can be renamed into place *after* creation to prevent
+                # unexpected failure from causing future upgrades to fail.
+                old_cat_root = self.catalog_root
+                tmp_cat_root = self.__mkdtemp()
+
+                if os.path.exists(old_cat_root):
+                        # Now remove the temporary directory and then copy the
+                        # contents of the existing catalog directory to the new,
+                        # temporary name.  This is necessary since the catalog
+                        # only saves the data that has been loaded or changed,
+                        # so new parts will get written out, but old ones could
+                        # be lost.
+                        shutil.rmtree(tmp_cat_root)
+                        shutil.copytree(old_cat_root, tmp_cat_root)
+
+                # Ensure the permissions on the new temporary catalog
+                # directory are correct.
+                try:
+                        os.chmod(tmp_cat_root, 0755)
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise api_errors.PermissionsException(
+                                    e.filename)
+                        raise
+
+                # Save the new catalog data in the temporary location.
+                self.catalog_root = tmp_cat_root
+                if lm:
+                        self.catalog.last_modified = lm
+                self.catalog.save()
+
+                orig_cat_root = None
+                if os.path.exists(old_cat_root):
+                        # Preserve the old catalog data before continuing.
+                        orig_cat_root = os.path.join(os.path.dirname(
+                            old_cat_root), "old." + os.path.basename(
+                            old_cat_root))
+                        shutil.move(old_cat_root, orig_cat_root)
+
+                # Finally, rename the new catalog data into place, reset the
+                # catalog's location, and remove the old catalog data.
+                shutil.move(tmp_cat_root, old_cat_root)
+                self.catalog_root = old_cat_root
+                if orig_cat_root:
+                        shutil.rmtree(orig_cat_root)
+
+        def __set_catalog_root(self, root):
+                self.__catalog_root = root
+                if self.__catalog:
+                        # If the catalog is loaded already, then reset
+                        # its meta_root.
+                        self.catalog.meta_root = root
+
+        def __set_repo_root(self, root):
+                assert root is not None
+
+                root = os.path.abspath(root)
+                self.__repo_root = root
+                self.__tmp_root = os.path.join(root, "tmp")
+                self.catalog_root = os.path.join(root, "catalog")
+                self.feed_cache_root = root
+                self.file_root = os.path.join(root, "file")
+                self.index_root = os.path.join(root, "index")
+                self.pkg_root = os.path.join(root, "pkg")
+                self.trans_root = os.path.join(root, "trans")
+
+        def __set_writable_root(self, root):
+                if root is not None:
+                        root = os.path.abspath(root)
+                        self.__tmp_root = os.path.join(root, "tmp")
+                        self.feed_cache_root = root
+                        self.index_root = os.path.join(root, "index")
+                else:
+                        self.__tmp_root = os.path.join(self.repo_root, "tmp")
+                        self.feed_cache_root = self.repo_root
+                        self.index_root = os.path.join(self.repo_root, "index")
+                self.__writable_root = root
+
+        def __unlock_repository(self):
+                """Unlocks the repository so other consumers may modify it."""
+
+                # XXX need filesystem unlock too?
+                self.__lock.release()
+
+        def __update_searchdb_unlocked(self, fmris):
+                """ Creates an indexer then hands it fmris It assumes that all
+                needed locking has already occurred.
+                """
+                assert self.index_root
+
+                if fmris:
+                        index_inst = indexer.Indexer(self.index_root,
+                            self._get_manifest, self.manifest,
+                            log=self.__index_log)
+                        index_inst.server_update_index(fmris)
+
+        @staticmethod
+        def __whence(cmd):
+                if cmd[0] != '/':
+                        tmp_cmd = cmd
+                        cmd = None
+                        path = os.environ['PATH'].split(':')
+                        path.append(os.environ['PWD'])
+                        for p in path:
+                                if os.path.exists(os.path.join(p, tmp_cmd)):
+                                        cmd = os.path.join(p, tmp_cmd)
+                                        break
+                        assert cmd
+                return cmd
+
+        def __write_config(self):
                 """Save the repository's current configuration data."""
 
                 # No changes should be written to disk in readonly mode.
-                if self.scfg.is_read_only():
+                if self.read_only:
                         return
 
                 # Save a new configuration (or refresh existing).
                 try:
-                        self.rcfg.write(self.cfgpathname)
+                        self.cfg.write(self.cfgpathname)
                 except EnvironmentError, e:
-                        # If we're unable to write due to the following errors,
-                        # it isn't critical to the operation of the repository.
+                        # If we're unable to write due to the following
+                        # errors, it isn't critical to the operation of
+                        # the repository.
                         if e.errno not in (errno.EPERM, errno.EACCES,
                             errno.EROFS):
                                 raise
@@ -173,65 +924,120 @@
                 """Aborts a transaction with the specified Transaction ID.
                 Returns the current package state."""
 
-                try:
-                        t = self.scfg.in_flight_trans[trans_id]
-                except KeyError:
-                        raise RepositoryInvalidTransactionIDError(trans_id)
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if self.read_only:
+                        raise RepositoryReadOnlyError()
 
+                self.__lock_repository()
                 try:
-                        pstate = t.abandon()
-                        del self.scfg.in_flight_trans[trans_id]
-                        return pstate
-                except trans.TransactionError, e:
-                        raise RepositoryError(e)
+                        try:
+                                t = self.__in_flight_trans[trans_id]
+                        except KeyError:
+                                raise RepositoryInvalidTransactionIDError(
+                                    trans_id)
+
+                        try:
+                                pstate = t.abandon()
+                                del self.__in_flight_trans[trans_id]
+                                return pstate
+                        except trans.TransactionError, e:
+                                raise RepositoryError(e)
+                finally:
+                        self.__unlock_repository()
 
         def add(self, trans_id, action):
                 """Adds an action and its content to a transaction with the
                 specified Transaction ID."""
 
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if self.read_only:
+                        raise RepositoryReadOnlyError()
+
+                self.__lock_repository()
                 try:
-                        t = self.scfg.in_flight_trans[trans_id]
-                except KeyError:
-                        raise RepositoryInvalidTransactionIDError(trans_id)
+                        try:
+                                t = self.__in_flight_trans[trans_id]
+                        except KeyError:
+                                raise RepositoryInvalidTransactionIDError(
+                                    trans_id)
+
+                        try:
+                                t.add_content(action)
+                        except trans.TransactionError, e:
+                                raise RepositoryError(e)
+                finally:
+                        self.__unlock_repository()
+
+        def add_package(self, pfmri):
+                """Adds the specified FMRI to the repository's catalog."""
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if self.read_only:
+                        raise RepositoryReadOnlyError()
+
+                self.__lock_repository()
+                try:
+                        self.__add_package(pfmri)
+                        self.__save_catalog()
+                finally:
+                        self.__unlock_repository()
+
+        @property
+        def catalog(self):
+                """Returns the Catalog object for the repository's catalog."""
+
+                if self.__catalog:
+                        # Already loaded.
+                        return self.__catalog
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+
+                self.__catalog = catalog.Catalog(meta_root=self.catalog_root,
+                    log_updates=True, read_only=self.read_only)
+                return self.__catalog
+
+        def catalog_0(self):
+                """Returns a generator object for the full version of
+                the catalog contents.  Incremental updates are not provided
+                as the v0 updatelog does not support renames, obsoletion,
+                package removal, etc."""
+
+                c = self.catalog
+                self.inc_catalog()
+
+                # Yield each catalog attr in the v0 format:
+                # S Last-Modified: 2009-08-28T15:01:48.546606
+                # S prefix: CRSV
+                # S npkgs: 46292
+                yield "S Last-Modified: %s\n" % c.last_modified.isoformat()
+                yield "S prefix: CRSV\n"
+                yield "S npkgs: %s\n" % c.package_version_count
+
+                # Now yield each FMRI in the catalog in the v0 format:
+                # V pkg:/[email protected],5.11-0.86:20080426T173208Z
+                for pub, stem, ver in c.tuples():
+                        yield "V pkg:/%s@%s\n" % (stem, ver)
+
+        def catalog_1(self, name):
+                """Returns the absolute pathname of the named catalog file."""
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+
+                assert name
+                self.inc_catalog()
 
                 try:
-                        t.add_content(action)
-                except trans.TransactionError, e:
-                        raise RepositoryError(e)
-
-        def catalog(self, last_modified=None):
-                """Returns a generator object containing an incremental update
-                if 'last_modified' is provided.  If 'last_modified' is not
-                provided, a generator object for the full version of the catalog
-                will be returned instead.  'last_modified' should be a datetime
-                object or an ISO8601 formatted string."""
-
-                self.scfg.inc_catalog()
-
-                if isinstance(last_modified, basestring):
-                        last_modified = catalog.ts_to_datetime(last_modified)
-
-                # Incremental catalog updates
-                c = self.scfg.catalog
-                ul = self.scfg.updatelog
-                if last_modified:
-                        if not ul.up_to_date(last_modified) and \
-                            ul.enough_history(last_modified):
-                                for line in ul._gen_updates(last_modified):
-                                        yield line
-                        else:
-                                raise RepositoryCatalogNoUpdatesError(
-                                    "incremental", c.last_modified())
-                        return
-
-                # Full catalog request.
-                # Return attributes first.
-                for line in c.attrs_as_lines():
-                        yield line
-
-                # Return the contents last.
-                for line in c.as_lines():
-                        yield line
+                        return os.path.normpath(os.path.join(
+                            self.catalog_root, name))
+                except EnvironmentError, e:
+                        if e.errno == errno.ENOENT:
+                                raise RepositoryFileNotFoundError(e.filename)
+                        raise
 
         def close(self, trans_id, refresh_index=True):
                 """Closes the transaction specified by 'trans_id'.
@@ -239,152 +1045,237 @@
                 Returns a tuple containing the package FMRI and the current
                 package state in the catalog."""
 
+                if self.mirror:
+                        raise RepositoryMirrorError()
+
                 try:
-                        t = self.scfg.in_flight_trans[trans_id]
+                        t = self.__in_flight_trans[trans_id]
                 except KeyError:
                         raise RepositoryInvalidTransactionIDError(trans_id)
 
                 try:
                         pfmri, pstate = t.close(refresh_index=refresh_index)
-                        del self.scfg.in_flight_trans[trans_id]
+                        del self.__in_flight_trans[trans_id]
                         return pfmri, pstate
-                except (catalog.CatalogException, trans.TransactionError), e:
+                except (api_errors.CatalogError, trans.TransactionError), e:
                         raise RepositoryError(e)
 
         def file(self, fhash):
                 """Returns the absolute pathname of the file specified by the
                 provided SHA1-hash name."""
 
-                self.scfg.inc_file()
+                self.inc_file()
 
                 if fhash is None:
                         raise RepositoryFileNotFoundError(fhash)
 
                 try:
                         return os.path.normpath(os.path.join(
-                            self.scfg.file_root, misc.hash_file_name(fhash)))
+                            self.file_root, misc.hash_file_name(fhash)))
                 except EnvironmentError, e:
                         if e.errno == errno.ENOENT:
                                 raise RepositoryFileNotFoundError(fhash)
                         raise
 
+        @property
+        def in_flight_transactions(self):
+                """The number of transactions awaiting completion."""
+                return len(self.__in_flight_trans)
+
+        def inc_catalog(self):
+                self.catalog_requests += 1
+
+        def inc_manifest(self):
+                self.manifest_requests += 1
+
+        def inc_file(self):
+                self.file_requests += 1
+
+        def inc_flist(self):
+                self.flist_requests += 1
+
+        def inc_flist_files(self):
+                self.flist_files += 1
+
+        def inc_renamed(self):
+                self.pkgs_renamed += 1
+
         def manifest(self, pfmri):
                 """Returns the absolute pathname of the manifest file for the
                 specified FMRI."""
 
-                self.scfg.inc_manifest()
+                if self.mirror:
+                        raise RepositoryMirrorError()
+
+                self.inc_manifest()
 
                 try:
                         if not isinstance(pfmri, fmri.PkgFmri):
-                                pfmri = fmri.PkgFmri(pfmri, None)
+                                pfmri = fmri.PkgFmri(pfmri)
                         fpath = pfmri.get_dir_path()
                 except fmri.FmriError, e:
                         raise RepositoryInvalidFMRIError(e)
 
-                return os.path.join(self.scfg.pkg_root, fpath)
+                return os.path.join(self.pkg_root, fpath)
 
         def open(self, client_release, pfmri):
                 """Starts a transaction for the specified client release and
                 FMRI.  Returns the Transaction ID for the new transaction."""
 
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if self.read_only:
+                        raise RepositoryReadOnlyError()
+
+                self.__lock_repository()
                 try:
-                        t = trans.Transaction()
-                        t.open(self.scfg, client_release, pfmri)
-                        self.scfg.in_flight_trans[t.get_basename()] = t
-                        return t.get_basename()
-                except trans.TransactionError, e:
-                        raise RepositoryError(e)
-
-        def rename(self, src_fmri, dest_fmri):
-                """Renames an existing package specified by 'src_fmri' to
-                'dest_fmri'.  Returns nothing."""
-
-                if not isinstance(src_fmri, fmri.PkgFmri):
                         try:
-                                src_fmri = fmri.PkgFmri(src_fmri, None)
-                        except fmri.FmriError, e:
-                                raise RepositoryInvalidFMRIError(e)
-
-                if not isinstance(dest_fmri, fmri.PkgFmri):
-                        try:
-                                dest_fmri = fmri.PkgFmri(dest_fmri, None)
-                        except fmri.FmriError, e:
-                                raise RepositoryInvalidFMRIError(e)
-
-                try:
-                        self.scfg.updatelog.rename_package(src_fmri.pkg_name,
-                            str(src_fmri.version), dest_fmri.pkg_name,
-                            str(dest_fmri.version))
-                except (catalog.CatalogException, catalog.RenameException):
-                        raise RepositoryRenameFailureError(dest_fmri)
-
-                self.scfg.inc_renamed()
+                                t = trans.Transaction()
+                                t.open(self, client_release, pfmri)
+                                self.__in_flight_trans[t.get_basename()] = t
+                                return t.get_basename()
+                        except trans.TransactionError, e:
+                                raise RepositoryError(e)
+                finally:
+                        self.__unlock_repository()
 
         def refresh_index(self):
-                """Updates the repository's search indices."""
-                self.scfg.catalog.refresh_index()
+                """ This function refreshes the search indexes if there any new
+                packages.  It starts a subprocess which results in a call to
+                run_update_index (see below) which does the actual update."""
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+
+                self.__lock_repository()
+                try:
+                        self.__refresh_index()
+                finally:
+                        self.__unlock_repository()
+
+        def rebuild(self):
+                """Rebuilds the repository catalog and search indices using the
+                package manifests currently in the repository."""
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+
+                self.__lock_repository()
+                try:
+                        self.__destroy_catalog()
+                        self.__init_dirs()
+                        self.__check_search()
+                        self.__rebuild()
+                        self.__refresh_index()
+                finally:
+                        self.__unlock_repository()
+
+        def reload(self, cfgpathname=None, properties=EmptyDict):
+                """Reloads the repository state information from disk."""
+
+                self.__lock_repository()
+                self.__init_config(cfgpathname=cfgpathname,
+                    properties=properties)
+                self.__init_state()
+                self.__unlock_repository()
+
+        def run_update_index(self):
+                """ Determines which fmris need to be indexed and passes them
+                to the indexer.
+
+                Note: Only one instance of this method should be running.
+                External locking is expected to ensure this behavior. Calling
+                refresh index is the preferred method to use to reindex.
+                """
 
-        def search(self, query_str_lst):
-                """Searches the index for each query in the list of query
-                strings.  Each string should be the output of str(Query)."""
+                if self.mirror:
+                        raise RepositoryMirrorError()
+
+                c = self.catalog
+                # XXX this is pretty gross; the indexer needs to take a
+                # generator.
+                fmris_to_index = set(c.fmris())
+
+                indexer.Indexer.check_for_updates(self.index_root,
+                    fmris_to_index)
+
+                if fmris_to_index:
+                        self.__index_log("Updating search indices")
+                        self.__update_searchdb_unlocked(fmris_to_index)
+                else:
+                        ind = indexer.Indexer(self.index_root,
+                            self._get_manifest, self.manifest,
+                            log=self.__index_log)
+                        ind.setup()
 
+        def search(self, queries):
+                """Searches the index for each query in the list of queries.
+                Each entry should be the output of str(Query), or a Query
+                object."""
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+
+                def _search(q):
+                        assert self.index_root
+                        l = query_p.QueryLexer()
+                        l.build()
+                        qp = query_p.QueryParser(l)
+                        query = qp.parse(q.encoded_text())
+                        query.set_info(num_to_return=q.num_to_return,
+                            start_point=q.start_point,
+                            index_dir=self.index_root,
+                            get_manifest_path=self.manifest,
+                            case_sensitive=q.case_sensitive)
+                        return query.search(c.fmris)                
+
+                c = self.catalog
+                query_lst = []
                 try:
-                        query_lst = [
-                            query_p.Query.fromstr(s)
-                            for s in query_str_lst
-                        ]
+                        for s in queries:
+                                if not isinstance(s, query_p.Query):
+                                        query_lst.append(
+                                            query_p.Query.fromstr(s))
+                                else:
+                                        query_lst.append(s)
                 except query_p.QueryException, e:
                         raise RepositoryError(e)
-                
-                res_list = [
-                    self.scfg.catalog.search(q)
-                    for q in query_lst
-                ]
-                return res_list
+                return [_search(q) for q in query_lst]
 
-class NastyRepository(Repository):
-        """A repository object that helps the Nasty server misbehave.
-        At the present time, this only overrides the catalog method,
-        so that the catalog may pass a scfg object to the Catalog and
-        UpdateLog."""
+        @property
+        def search_available(self):
+                return self.__search_available or self.__check_search()
 
-        def __init__(self, scfg, cfgpathname=None):
-                """Prepare the repository for use."""
-
-                Repository.__init__(self, scfg, cfgpathname)
+        def valid_new_fmri(self, pfmri):
+                """Check that the FMRI supplied as an argument would be valid
+                to add to the repository catalog.  This checks to make sure
+                that any past catalog operations (such as a rename or freeze)
+                would not prohibit the caller from adding this FMRI."""
 
-        def catalog(self, last_modified=None):
-                """Returns a generator object containing an incremental update
-                if 'last_modified' is provided.  If 'last_modified' is not
-                provided, a generator object for the full version of the catalog
-                will be returned instead.  'last_modified' should be a datetime
-                object or an ISO8601 formatted string."""
-
-                self.scfg.inc_catalog()
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if not fmri.is_valid_pkg_name(pfmri.get_name()):
+                        return False
+                if not pfmri.version:
+                        return False
 
-                if isinstance(last_modified, basestring):
-                        last_modified = catalog.ts_to_datetime(last_modified)
+                c = self.catalog
+                try:
+                        c.get_entry(pfmri)
+                except api_errors.UnknownCatalogEntry:
+                        return True
+                return False
+
+        def write_config(self):
+                """Save the repository's current configuration data."""
 
-                # Incremental catalog updates
-                c = self.scfg.catalog
-                ul = self.scfg.updatelog
-                if last_modified:
-                        if not ul.up_to_date(last_modified) and \
-                            ul.enough_history(last_modified):
-                                for line in ul._gen_updates(last_modified,
-                                    self.scfg):
-                                        yield line
-                        else:
-                                raise RepositoryCatalogNoUpdatesError(
-                                    "incremental", c.last_modified())
-                        return
+                self.__lock_repository()
+                try:
+                        self.__write_config()
+                finally:
+                        self.__unlock_repository()
 
-                # Full catalog request.
-                # Return attributes first.
-                for line in c.attrs_as_lines():
-                        yield line
+        catalog_root = property(__get_catalog_root, __set_catalog_root)
+        repo_root = property(__get_repo_root, __set_repo_root)
+        writable_root = property(__get_writable_root, __set_writable_root)
 
-                # Return the contents last.
-                for line in c.as_lines(self.scfg):
-                        yield line
-
--- a/src/modules/server/repositoryconfig.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/server/repositoryconfig.py	Fri Oct 23 17:43:37 2009 -0500
@@ -26,100 +26,104 @@
 import ConfigParser
 import pkg.misc as misc
 import pkg.Uuid25 as uuid
+import random
 
-ATTR_TYPE_STR = 0
-ATTR_TYPE_INT = 1
-ATTR_TYPE_FLOAT = 2
-ATTR_TYPE_BOOL = 3
-ATTR_TYPE_UUID = 4
-ATTR_TYPE_URI = 5
-ATTR_TYPE_URI_LIST = 6
-ATTR_TYPE_PUB_ALIAS = 7
-ATTR_TYPE_PUB_PREFIX = 8
-ATTR_TYPE_REPO_COLL_TYPE = 9
+PROP_TYPE_STR = 0
+PROP_TYPE_INT = 1
+PROP_TYPE_FLOAT = 2
+PROP_TYPE_BOOL = 3
+PROP_TYPE_UUID = 4
+PROP_TYPE_URI = 5
+PROP_TYPE_URI_LIST = 6
+PROP_TYPE_PUB_ALIAS = 7
+PROP_TYPE_PUB_PREFIX = 8
+PROP_TYPE_REPO_COLL_TYPE = 9
 
-class InvalidAttributeError(Exception):
-        """Exception class used to indicate an invalid attribute.
-        """
+class PropertyError(Exception):
+        """Base exception class for property errors."""
+
         def __init__(self, *args):
-                """Standard init override for Exception class."""
                 Exception.__init__(self, *args)
 
-class InvalidAttributeValueError(Exception):
-        """Exception class used to indicate an invalid attribute value.
-        """
-        def __init__(self, *args):
-                """Standard init override for Exception class."""
-                Exception.__init__(self, *args)
+
+class InvalidPropertyError(PropertyError):
+        """Exception class used to indicate an invalid property."""
+
+
+class InvalidPropertyValueError(PropertyError):
+        """Exception class used to indicate an invalid property value."""
+
 
-class ReadOnlyAttributeError(Exception):
+class RequiredPropertyValueError(PropertyError):
+        """Exception class used to indicate a required property value is
+        missing."""
+
+
+class ReadOnlyPropertyError(PropertyError):
         """Exception class used to indicate when an attempt to set a read-only
-        value was made.
-        """
-        def __init__(self, *args):
-                """Standard init override for Exception class."""
-                Exception.__init__(self, *args)
+        value was made."""
+
 
 class RepositoryConfig(object):
         """A RepositoryConfig object is a collection of configuration
         information and metadata for a repository.
         """
 
-        # This data structure defines the list of possible attributes for a
-        # repository along with two optional attributes: default and readonly.
-        _attrs = {
+        # This data structure defines the list of possible properties for a
+        # repository along with two optional properties: default and readonly.
+        _props = {
             "publisher": {
                 "alias": {
-                    "type": ATTR_TYPE_PUB_ALIAS,
+                    "type": PROP_TYPE_PUB_ALIAS,
                 },
                 "prefix": {
-                    "type": ATTR_TYPE_PUB_PREFIX,
+                    "type": PROP_TYPE_PUB_PREFIX,
                 },
             },
             "repository": {
                 "collection_type": {
-                    "type": ATTR_TYPE_REPO_COLL_TYPE,
+                    "type": PROP_TYPE_REPO_COLL_TYPE,
                     "default": "core",
                 },
                 "description": {},
                 "detailed_url": {
-                    "type": ATTR_TYPE_URI,
+                    "type": PROP_TYPE_URI,
                     "default": "http://www.opensolaris.com"
                 },
                 "legal_uris": {
-                    "type": ATTR_TYPE_URI_LIST
+                    "type": PROP_TYPE_URI_LIST
                 },
                 "maintainer": {
                     "default":
                         "Project Indiana <[email protected]>"
                 },
                 "maintainer_url": {
-                    "type": ATTR_TYPE_URI,
+                    "type": PROP_TYPE_URI,
                     "default": "http://www.opensolaris.org/os/project/indiana/"
                 },
                 "mirrors": {
-                    "type": ATTR_TYPE_URI_LIST
+                    "type": PROP_TYPE_URI_LIST
                 },
                 "name": {
                     "default": "package repository"
                 },
                 "origins": {
-                    "type": ATTR_TYPE_URI_LIST
+                    "type": PROP_TYPE_URI_LIST
                 },
                 "refresh_seconds": {
-                    "type": ATTR_TYPE_INT,
+                    "type": PROP_TYPE_INT,
                     "default": 4 * 60 * 60, # default is 4 hours
                 },
                 "registration_uri": {
-                    "type": ATTR_TYPE_URI,
+                    "type": PROP_TYPE_URI,
                 },
                 "related_uris": {
-                    "type": ATTR_TYPE_URI_LIST
+                    "type": PROP_TYPE_URI_LIST
                 },
             },
             "feed": {
                 "id": {
-                    "type": ATTR_TYPE_UUID,
+                    "type": PROP_TYPE_UUID,
                     "readonly": True,
                 },
                 "name": {
@@ -133,25 +137,28 @@
                     "default": "web/_themes/pkg-block-logo.png"
                 },
                 "window": {
-                    "type": ATTR_TYPE_INT,
+                    "type": PROP_TYPE_INT,
                     "default": 24
                 },
             },
         }
 
-        def __init__(self, pathname=None):
+        def __init__(self, pathname=None, properties=misc.EmptyDict):
                 """Initializes a RepositoryConfig object.
 
                 Will read existing configuration data from pathname, if
                 specified.
                 """
 
+                self.cfg_cache = {}
+                self.nasty = 0
+
                 if pathname:
                         # If a pathname was provided, read the data in.
-                        self.read(pathname)
+                        self.read(pathname, overrides=properties)
                 else:
                         # Otherwise, initialize to default state.
-                        self.__reset()
+                        self.__reset(overrides=properties)
 
         def __str__(self):
                 """Returns a string representation of the configuration
@@ -159,79 +166,82 @@
                 """
                 return "%s" % self.cfg_cache
 
-        def __reset(self):
+        def __reset(self, overrides=misc.EmptyDict):
                 """Returns the configuration object to its default state.
                 """
+
                 self.cfg_cache = {}
-                for section in self._attrs:
-                        sattrs = self._attrs[section]
-                        for attr in sattrs:
-                                info = sattrs[attr]
+                for section in self._props:
+                        sprops = self._props[section]
+                        for prop in sprops:
+                                info = sprops[prop]
                                 default = info.get("default", None)
 
-                                atype = self.get_attribute_type(section, attr)
+                                if section in overrides and \
+                                    prop in overrides[section]:
+                                        default = overrides[section][prop]
+
+                                ptype = self.get_property_type(section, prop)
                                 if default is None and \
-                                    atype == ATTR_TYPE_URI_LIST:
+                                    ptype == PROP_TYPE_URI_LIST:
                                         default = []
 
-                                if section not in self.cfg_cache:
-                                        self.cfg_cache[section] = {}
-
-                                self.cfg_cache[section][attr] = default
+                                self.cfg_cache.setdefault(section, {})
+                                self.cfg_cache[section][prop] = default
 
         @classmethod
-        def is_valid_attribute(cls, section, attr, raise_error=False):
-                """Returns a boolean indicating whether the given attribute
+        def is_valid_property(cls, section, prop, raise_error=False):
+                """Returns a boolean indicating whether the given property
                 is valid for the specified section.
 
                 This function will raise an exception instead of returning a
                 boolean is raise_error=True is specified.
                 """
-                if section not in cls._attrs:
+                if section not in cls._props:
                         if raise_error:
-                                raise InvalidAttributeError("Invalid "
-                                    " attribute. Unknown section: %s." % \
+                                raise InvalidPropertyError("Invalid "
+                                    " property. Unknown section: %s." % \
                                     (section))
                         else:
                                 return False
-                if attr not in cls._attrs[section]:
+                if prop not in cls._props[section]:
                         if raise_error:
-                                raise InvalidAttributeError("Invalid "
-                                    "attribute %s.%s." % \
-                                    (section, attr))
+                                raise InvalidPropertyError("Invalid "
+                                    "property %s.%s." % \
+                                    (section, prop))
                         else:
                                 return False
                 return True
 
         @classmethod
-        def get_attribute_type(cls, section, attr):
+        def get_property_type(cls, section, prop):
                 """Returns a numeric value indicating the data type of the
-                given attribute for the specified section.
+                given property for the specified section.
 
                 The return value corresponds to one of the following module
                 constants which matches a Python data type:
-                    ATTR_TYPE_STR               str
-                    ATTR_TYPE_INT               int
-                    ATTR_TYPE_FLOAT             float
-                    ATTR_TYPE_BOOL              boolean
-                    ATTR_TYPE_UUID              str
-                    ATTR_TYPE_URI               str
-                    ATTR_TYPE_URI_LIST          list of str
-                    ATTR_TYPE_PUB_ALIAS         str
-                    ATTR_TYPE_PUB_PREFIX        str
-                    ATTR_TYPE_REPO_COLL_TYPE    str
+                    PROP_TYPE_STR               str
+                    PROP_TYPE_INT               int
+                    PROP_TYPE_FLOAT             float
+                    PROP_TYPE_BOOL              boolean
+                    PROP_TYPE_UUID              str
+                    PROP_TYPE_URI               str
+                    PROP_TYPE_URI_LIST          list of str
+                    PROP_TYPE_PUB_ALIAS         str
+                    PROP_TYPE_PUB_PREFIX        str
+                    PROP_TYPE_REPO_COLL_TYPE    str
                 """
-                if cls.is_valid_attribute(section, attr, raise_error=True):
-                        info = cls._attrs[section][attr]
-                        return info.get("type", ATTR_TYPE_STR)
+                if cls.is_valid_property(section, prop, raise_error=True):
+                        info = cls._props[section][prop]
+                        return info.get("type", PROP_TYPE_STR)
                 else:
                         return False
 
         @classmethod
-        def is_valid_attribute_value(cls, section, attr, value,
+        def is_valid_property_value(cls, section, prop, value,
             raise_error=False):
-                """Returns a boolean indicating whether the given attribute
-                value is valid for the specified section and attribute.
+                """Returns a boolean indicating whether the given property
+                value is valid for the specified section and property.
 
                 This function will raise an exception instead of returning a
                 boolean is raise_error=True is specified.
@@ -248,42 +258,43 @@
                         if not valid:
                                 raise ValueError()
 
-                if cls.is_valid_attribute(section, attr,
+                if cls.is_valid_property(section, prop,
                     raise_error=raise_error):
-                        atype = cls.get_attribute_type(section, attr)
+                        ptype = cls.get_property_type(section, prop)
                         # If the type is string, we always assume it is valid.
                         # For all other types, we attempt a forced conversion
                         # of the value; if it fails, we know the value isn't
                         # valid for the given type.
                         try:
-                                if atype == ATTR_TYPE_STR:
+                                if ptype == PROP_TYPE_STR:
                                         return True
-                                elif atype == ATTR_TYPE_INT:
+                                elif ptype == PROP_TYPE_INT:
                                         int(value)
-                                elif atype == ATTR_TYPE_FLOAT:
+                                elif ptype == PROP_TYPE_FLOAT:
                                         float(value)
-                                elif atype == ATTR_TYPE_BOOL:
+                                elif ptype == PROP_TYPE_BOOL:
                                         if str(value) not in ("True", "False"):
                                                 raise TypeError
-                                elif atype == ATTR_TYPE_UUID:
-                                        # None is valid for configuration
-                                        # purposes, even though UUID would
-                                        # fail.
-                                        if value is not None:
+                                elif ptype == PROP_TYPE_UUID:
+                                        # None and '' are valid for
+                                        # configuration purposes, even though
+                                        # UUID would fail.
+                                        if value not in (None, ""):
                                                 uuid.UUID(hex=str(value))
-                                elif atype == ATTR_TYPE_URI:
+                                elif ptype == PROP_TYPE_URI:
                                         if value in (None, ""):
                                                 return True
                                         validate_uri(value)
-                                elif atype == ATTR_TYPE_URI_LIST:
+                                elif ptype == PROP_TYPE_URI_LIST:
                                         if not isinstance(value, list):
                                                 raise TypeError
                                         for u in value:
                                                 validate_uri(u)
-                                elif atype in (ATTR_TYPE_PUB_ALIAS,
-                                    ATTR_TYPE_PUB_PREFIX):
-                                        # For now, values are not required.
-                                        if value in (None, ""):
+                                elif ptype in (PROP_TYPE_PUB_ALIAS,
+                                    PROP_TYPE_PUB_PREFIX):
+                                        # For now, alias is not required.
+                                        if ptype == PROP_TYPE_PUB_ALIAS and \
+                                            value in (None, ""):
                                                 return True
 
                                         # The same rules that apply to publisher
@@ -291,19 +302,23 @@
                                         # now).
                                         if not misc.valid_pub_prefix(value):
                                                 raise ValueError()
-                                elif atype == ATTR_TYPE_REPO_COLL_TYPE:
+                                elif ptype == PROP_TYPE_REPO_COLL_TYPE:
                                         if str(value) not in ("core",
                                             "supplemental"):
                                                 raise TypeError
                                 else:
                                         raise RuntimeError(
-                                            "Unknown attribute type: %s" % \
-                                            atype)
+                                            "Unknown property type: %s" % \
+                                            ptype)
                         except (TypeError, ValueError, OverflowError):
                                 if raise_error:
-                                        raise InvalidAttributeValueError(
-                                            "Invalid value for %s.%s." % \
-                                            (section, attr))
+                                        if value in (None, ""):
+                                                raise RequiredPropertyValueError(
+                                                    "%s.%s is required." % \
+                                                    (section, prop))
+                                        raise InvalidPropertyValueError(
+                                            "Invalid value '%s' for %s.%s." % \
+                                            (value, section, prop))
                                 else:
                                         return False
                 else:
@@ -311,69 +326,69 @@
                 return True
 
         @classmethod
-        def is_readonly_attribute(cls, section, attr):
-                """Returns a boolean indicating whether the given attribute
+        def is_readonly_property(cls, section, prop):
+                """Returns a boolean indicating whether the given property
                 is read-only.
                 """
-                if cls.is_valid_attribute(section, attr, raise_error=True):
-                        info = cls._attrs[section][attr]
+                if cls.is_valid_property(section, prop, raise_error=True):
+                        info = cls._props[section][prop]
                         return info.get("readonly", False)
 
         @classmethod
-        def get_attributes(cls):
-                """Returns a dictionary of all attribute sections with each
-                section's attributes as a list.
+        def get_properties(cls):
+                """Returns a dictionary of all property sections with each
+                section's properties as a list.
                 """
                 return dict(
-                    (section, [attr for attr in cls._attrs[section]])
-                        for section in cls._attrs
+                    (section, [prop for prop in cls._props[section]])
+                        for section in cls._props
                 )
 
-        def get_attribute(self, section, attr):
-                """Returns the value of the specified attribute for the given
+        def get_property(self, section, prop):
+                """Returns the value of the specified property for the given
                 section.
                 """
-                if self.is_valid_attribute(section, attr, raise_error=True):
-                        return self.cfg_cache[section][attr]
+                if self.is_valid_property(section, prop, raise_error=True):
+                        return self.cfg_cache[section][prop]
 
-        def _set_attribute(self, section, attr, value):
-                """Sets the value of a given configuration attribute for the
+        def _set_property(self, section, prop, value):
+                """Sets the value of a given configuration property for the
                 specified section.
 
-                This method does not check the read-only status of an attribute
+                This method does not check the read-only status of a property
                 and is intended for internal use.
                 """
-                self.is_valid_attribute_value(section, attr, value,
+                self.is_valid_property_value(section, prop, value,
                     raise_error=True)
 
-                atype = self.get_attribute_type(section, attr)
-                if atype == ATTR_TYPE_INT:
-                        self.cfg_cache[section][attr] = int(value)
-                elif atype == ATTR_TYPE_FLOAT:
-                        self.cfg_cache[section][attr] = float(value)
-                elif atype == ATTR_TYPE_BOOL:
+                ptype = self.get_property_type(section, prop)
+                if ptype == PROP_TYPE_INT:
+                        self.cfg_cache[section][prop] = int(value)
+                elif ptype == PROP_TYPE_FLOAT:
+                        self.cfg_cache[section][prop] = float(value)
+                elif ptype == PROP_TYPE_BOOL:
                         if str(value) == "True":
-                                self.cfg_cache[section][attr] = True
+                                self.cfg_cache[section][prop] = True
                         else:
-                                self.cfg_cache[section][attr] = False
+                                self.cfg_cache[section][prop] = False
                 else:
                         # Treat all remaining types as a simple value.
-                        self.cfg_cache[section][attr] = value
+                        self.cfg_cache[section][prop] = value
 
-        def set_attribute(self, section, attr, value):
-                """Sets a given configuration attribute to the specified
+        def set_property(self, section, prop, value):
+                """Sets a given configuration property to the specified
                 value for the specified section.
 
                 This function will raise an exception if the specified
-                attribute is read-only.
+                property is read-only.
                 """
-                if not self.is_readonly_attribute(section, attr):
-                        return self._set_attribute(section, attr, value)
+                if not self.is_readonly_property(section, prop):
+                        return self._set_property(section, prop, value)
                 else:
-                        raise ReadOnlyAttributeError("%s.%s is read-only." % \
-                            (attr, section))
+                        raise ReadOnlyPropertyError("%s.%s is read-only." % \
+                            (section, prop))
 
-        def read(self, pathname):
+        def read(self, pathname, overrides=misc.EmptyDict):
                 """Reads the specified pathname and populates the configuration
                 object based on the data contained within.  The file is
                 expected to be in a ConfigParser-compatible format.
@@ -393,44 +408,49 @@
                             "'%s'.") % pathname)
 
                 assert r[0] == pathname
-                for section in self._attrs:
-                        for attr in self._attrs[section]:
-                                atype = self.get_attribute_type(section, attr)
+                for section in self._props:
+                        for prop in self._props[section]:
+                                ptype = self.get_property_type(section, prop)
                                 try:
+                                        if section in overrides and \
+                                            prop in overrides[section]:
+                                                val = overrides[section][prop]
+                                                cp.set(section, prop, str(val))
+
                                         # Retrieve the value as a string first
                                         # to prevent ConfigParser from causing
                                         # an exception.
-                                        value = cp.get(section, attr)
+                                        value = cp.get(section, prop)
 
                                         # The list types are special in that
                                         # they must be converted first before
                                         # validation.
-                                        if atype == ATTR_TYPE_URI_LIST:
+                                        if ptype == PROP_TYPE_URI_LIST:
                                                 uris = []
                                                 for u in value.split(","):
                                                         if u:
                                                                 uris.append(u)
                                                 value = uris
 
-                                        self.is_valid_attribute_value(
-                                            section, attr, value,
+                                        self.is_valid_property_value(
+                                            section, prop, value,
                                             raise_error=True)
 
-                                        if atype == ATTR_TYPE_INT:
+                                        if ptype == PROP_TYPE_INT:
                                                 value = cp.getint(section,
-                                                    attr)
-                                        elif atype == ATTR_TYPE_FLOAT:
+                                                    prop)
+                                        elif ptype == PROP_TYPE_FLOAT:
                                                 value = cp.getfloat(section,
-                                                    attr)
-                                        elif atype == ATTR_TYPE_BOOL:
+                                                    prop)
+                                        elif ptype == PROP_TYPE_BOOL:
                                                 value = cp.getboolean(section,
-                                                    attr)
+                                                    prop)
 
-                                        self.cfg_cache[section][attr] = value
+                                        self.cfg_cache[section][prop] = value
 
                                 except (ConfigParser.NoSectionError,
                                     ConfigParser.NoOptionError):
-                                        # Skip any missing attributes.
+                                        # Skip any missing properties.
                                         continue
 
         def write(self, pathname):
@@ -439,20 +459,20 @@
                 """
                 cp = ConfigParser.SafeConfigParser()
 
-                for section in self._attrs:
+                for section in self._props:
                         cp.add_section(section)
-                        for attr in self._attrs[section]:
-                                value = self.cfg_cache[section][attr]
+                        for prop in self._props[section]:
+                                value = self.cfg_cache[section][prop]
 
-                                atype = self.get_attribute_type(section, attr)
-                                if atype == ATTR_TYPE_URI_LIST:
+                                ptype = self.get_property_type(section, prop)
+                                if ptype == PROP_TYPE_URI_LIST:
                                         value = ",".join(value)
 
                                 if value is not None:
-                                        cp.set(section, attr, str(value))
+                                        cp.set(section, prop, str(value))
                                 else:
                                         # Force None to be an empty string.
-                                        cp.set(section, attr, "")
+                                        cp.set(section, prop, "")
 
                 try:
                         f = open(pathname, "w")
@@ -461,3 +481,61 @@
                             "%s" % (pathname, strerror))
                 cp.write(f)
 
+        def validate(self):
+                """Verify that the in-memory contents of the configuration
+                satisfy validation requirements (such as required fields)."""
+
+                for section in self._props:
+                        for prop in self._props[section]:
+                                value = self.cfg_cache.get(section,
+                                    {}).get(prop)
+                                ptype = self.get_property_type(section, prop)
+                                self.is_valid_property_value(
+                                    section, prop, value,
+                                    raise_error=True)
+
+        def set_nasty(self, level):
+                """Set the nasty level using an integer."""
+
+                self.nasty = level
+
+        def is_nasty(self):
+                """Returns true if nasty has been enabled."""
+
+                if self.nasty > 0:
+                        return True
+                return False
+
+        def need_nasty(self):
+                """Randomly returns true when the server should misbehave."""
+
+                if random.randint(1, 100) <= self.nasty:
+                        return True
+                return False
+
+        def need_nasty_bonus(self, bonus=0):
+                """Used to temporarily apply extra nastiness to an operation."""
+
+                if self.nasty + bonus > 95:
+                        nasty = 95
+                else:
+                        nasty = self.nasty + bonus
+
+                if random.randint(1, 100) <= nasty:
+                        return True
+                return False
+
+        def need_nasty_occasionally(self):
+                if random.randint(1, 500) <= self.nasty:
+                        return True
+                return False
+
+        def need_nasty_infrequently(self):
+                if random.randint(1, 2000) <= self.nasty:
+                        return True
+                return False
+
+        def need_nasty_rarely(self):
+                if random.randint(1, 20000) <= self.nasty:
+                        return True
+                return False
--- a/src/modules/server/transaction.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/server/transaction.py	Fri Oct 23 17:43:37 2009 -0500
@@ -31,6 +31,7 @@
 import shutil
 import urllib
 
+import pkg.actions as actions
 import pkg.fmri as fmri
 import pkg.misc as misc
 from pkg.pkggzip import PkgGzipFile
@@ -86,6 +87,12 @@
                 elif "valid_new_fmri" in self.args:
                         return _("The specified FMRI, '%s', already exists or "
                             "has been restricted.") % self.args.get("pfmri", "")
+                elif "publisher_required" in self.args:
+                        return _("The specified FMRI, '%s', must include the "
+                            "publisher prefix as the repository contains "
+                            "package data for more than one publisher or "
+                            "a default publisher has not been defined.") % \
+                            self.args.get("pfmri", "")
                 elif "pfmri" in self.args:
                         return _("The specified FMRI, '%s', is invalid.") % \
                             self.args["pfmri"]
@@ -110,8 +117,7 @@
                 self.open_time = None
                 self.pkg_name = ""
                 self.esc_pkg_name = ""
-                self.critical = False
-                self.cfg = None
+                self.repo = None
                 self.client_release = ""
                 self.fmri = None
                 self.dir = ""
@@ -124,9 +130,9 @@
                     (calendar.timegm(self.open_time.utctimetuple()),
                     urllib.quote(str(self.fmri), ""))
 
-        def open(self, cfg, client_release, pfmri):
+        def open(self, repo, client_release, pfmri):
                 # XXX needs to be done in __init__
-                self.cfg = cfg
+                self.repo = repo
 
                 if client_release is None:
                         raise TransactionOperationError(client_release=None,
@@ -145,11 +151,36 @@
                 except fmri.FmriError, e:
                         raise TransactionOperationError(e)
 
-                # We must have a version supplied for publication.
+                # Version is required for publication.
                 if self.fmri.version is None:
                         raise TransactionOperationError(fmri_version=None,
                             pfmri=pfmri)
 
+                # Ensure that the FMRI has been fully qualified with publisher
+                # information or apply the default if appropriate.
+                if not self.fmri.publisher:
+                        c = repo.catalog
+                        pubs = c.publishers()
+                        default_pub = repo.cfg.get_property("publisher",
+                            "prefix")
+
+                        if len(pubs) > 1 or not default_pub:
+                                # A publisher is required if the repository
+                                # contains package data for more than one
+                                # publisher or no default has been defined.
+                                raise TransactionOperationError(
+                                    publisher_required=True, pfmri=pfmri)
+
+                        self.fmri.publisher = default_pub
+                        pkg_name = self.pkg_name
+                        pub_string = "pkg://%s/" % default_pub
+                        if not pkg_name.startswith("pkg:/"):
+                                pkg_name = pub_string + pkg_name
+                        else:
+                                pkg_name = pkg_name.replace("pkg:/", pub_string)
+                        self.pkg_name = pkg_name
+                        self.esc_pkg_name = urllib.quote(pkg_name, "")
+
                 # record transaction metadata: opening_time, package, user
                 # XXX publishing with a custom timestamp may require
                 # authorization above the basic "can open transactions".
@@ -167,12 +198,12 @@
                 # Check that the new FMRI's version is valid.  In other words,
                 # the package has not been renamed or frozen for the new
                 # version.
-                if not cfg.catalog.valid_new_fmri(self.fmri):
+                if not repo.valid_new_fmri(self.fmri):
                         raise TransactionOperationError(valid_new_fmri=False,
                             pfmri=pfmri)
 
                 trans_basename = self.get_basename()
-                self.dir = "%s/%s" % (cfg.trans_root, trans_basename)
+                self.dir = "%s/%s" % (repo.trans_root, trans_basename)
 
                 try:
                         os.makedirs(self.dir)
@@ -186,8 +217,13 @@
                 # always create a minimal manifest
                 #
                 tfile = file("%s/manifest" % self.dir, "ab")
-                print >> tfile,  "# %s, client release %s" % (self.pkg_name,
-                    self.client_release)
+
+                # Build a set action containing the fully qualified FMRI and add
+                # it to the manifest.  While it may seem inefficient to create
+                # an action string, convert it to an action, and then back, it
+                # does ensure that the server is adding a valid action.
+                fact = actions.fromstr("set name=pkg.fmri value=%s" % self.fmri)
+                print >> tfile, str(fact)
                 tfile.close()
 
                 # XXX:
@@ -203,11 +239,11 @@
                 # if not found, create package
                 # set package state to TRANSACTING
 
-        def reopen(self, cfg, trans_dir):
+        def reopen(self, repo, trans_dir):
                 """The reopen() method is invoked on server restart, to
                 reestablish the status of inflight transactions."""
 
-                self.cfg = cfg
+                self.repo = repo
                 open_time_str, self.esc_pkg_name = \
                     os.path.basename(trans_dir).split("_", 1)
                 self.open_time = \
@@ -218,7 +254,7 @@
                 # client release on the initial open of the transaction.
                 self.fmri = fmri.PkgFmri(self.pkg_name, None)
 
-                self.dir = "%s/%s" % (self.cfg.trans_root, self.get_basename())
+                self.dir = "%s/%s" % (repo.trans_root, self.get_basename())
 
         def close(self, refresh_index=True):
                 """Closes an open transaction, returning the published FMRI for
@@ -239,7 +275,7 @@
 
                 # Discard the in-flight transaction data.
                 try:
-                        shutil.rmtree(os.path.join(self.cfg.trans_root,
+                        shutil.rmtree(os.path.join(self.repo.trans_root,
                             trans_id))
                 except EnvironmentError, e:
                         # Ensure that the error goes to stderr, and then drive
@@ -251,7 +287,7 @@
         def abandon(self):
                 trans_id = self.get_basename()
                 # state transition from TRANSACTING to ABANDONED
-                shutil.rmtree("%s/%s" % (self.cfg.trans_root, trans_id))
+                shutil.rmtree("%s/%s" % (self.repo.trans_root, trans_id))
                 return "ABANDONED"
 
         def add_content(self, action):
@@ -308,7 +344,7 @@
                         # to work right.
                         #
                         fpath = misc.hash_file_name(fname)
-                        dst_path = "%s/%s" % (self.cfg.file_root, fpath)
+                        dst_path = "%s/%s" % (self.repo.file_root, fpath)
                         fileneeded = True
                         if os.path.exists(dst_path):
                                 if PkgGzipFile.test_is_pkggzipfile(dst_path):
@@ -370,10 +406,9 @@
                 # our response with any other packages that moved to
                 # PUBLISHED due to the package's arrival.
                 self.publish_package()
-                self.cfg.updatelog.add_package(self.fmri, self.critical)
-
+                self.repo.add_package(self.fmri)
                 if refresh_index:
-                        self.cfg.catalog.refresh_index()
+                        self.repo.refresh_index()
 
                 return (str(self.fmri), "PUBLISHED")
 
@@ -382,13 +417,13 @@
 
                 It moves the files associated with the transaction into the
                 appropriate position in the server repository.  Callers
-                shall supply a fmri, config, and transaction in fmri, cfg,
-                and trans, respectively."""
+                shall supply a fmri, repository, and transaction in fmri,
+                repo, and trans, respectively."""
 
-                cfg = self.cfg
+                repo = self.repo
 
                 pkg_name = self.fmri.pkg_name
-                pkgdir = os.path.join(cfg.pkg_root, urllib.quote(pkg_name, ""))
+                pkgdir = os.path.join(repo.pkg_root, urllib.quote(pkg_name, ""))
 
                 # If the directory isn't there, create it.
                 if not os.path.exists(pkgdir):
@@ -406,7 +441,7 @@
                 for f in os.listdir(self.dir):
                         path = misc.hash_file_name(f)
                         src_path = os.path.join(self.dir, f)
-                        dst_path = os.path.join(cfg.file_root, path)
+                        dst_path = os.path.join(repo.file_root, path)
                         try:
                                 portable.rename(src_path, dst_path)
                         except OSError, e:
--- a/src/modules/updatelog.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/modules/updatelog.py	Fri Oct 23 17:43:37 2009 -0500
@@ -32,6 +32,7 @@
         os.SEEK_SET
 except AttributeError:
         os.SEEK_SET, os.SEEK_CUR, os.SEEK_END = range(3)
+import random
 import re
 import time
 import tempfile
@@ -208,7 +209,7 @@
                 # Then append the new line.
                 try:
                         for entry in lfile:
-                               tfile.write(entry)
+                                tfile.write(entry)
                         tfile.write(logstr)
                 except Exception:
                         portable.remove(tmpfile)
@@ -266,10 +267,12 @@
                         else:
                                 catalog.ServerCatalog.recv(c, path, pub)
                 except EnvironmentError, e:
-                        if isinstance(e, EnvironmentError):
-                                if e.errno == errno.EACCES:
-                                        raise api_errors.PermissionsException(
-                                            e.filename)
+                        if e.errno == errno.EACCES:
+                                raise api_errors.PermissionsException(
+                                    e.filename)
+                        if e.errno == errno.EROFS:
+                                raise api_errors.ReadOnlyFileSystemException(
+                                    e.filename)
                         raise
 
         @staticmethod
@@ -359,7 +362,7 @@
                         if e.errno == errno.ENOENT:
                                 # Creating an empty file
                                 file(catpath, "wb").close()
-                                pfile = file(self.catalog_file, "rb")
+                                pfile = file(catpath, "rb")
                         else:
                                 tfile.close()
                                 portable.remove(tmpfile)
--- a/src/pkgdefs/SUNWipkg/prototype	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/pkgdefs/SUNWipkg/prototype	Fri Oct 23 17:43:37 2009 -0500
@@ -201,14 +201,10 @@
 f none usr/lib/python2.4/vendor-packages/pkg/server/api_errors.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/catalog.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/catalog.pyc 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/server/config.py 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/server/config.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/depot.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/depot.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/depotresponse.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/depotresponse.pyc 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/server/errors.py 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/server/errors.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/face.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/face.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/server/feed.py 444 root bin
--- a/src/publish.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/publish.py	Fri Oct 23 17:43:37 2009 -0500
@@ -80,7 +80,7 @@
         pkgsend [options] command [cmd_options] [operands]
 
 Packager subcommands:
-        pkgsend create-repository
+        pkgsend create-repository --set-property <section.property>=<value>
         pkgsend open [-en] pkg_fmri
         pkgsend add action arguments
         pkgsend import [-T file_pattern] bundlefile ...
@@ -101,12 +101,29 @@
 def trans_create_repository(repo_uri, args):
         """Creates a new repository at the location indicated by repo_uri."""
 
-        if args:
-                usage(_("command does not take operands"),
-                    cmd="create-repository")
+        repo_props = {}
+        opts, pargs = getopt.getopt(args, "", ["set-property="])
+        for opt, arg in opts:
+                if opt == "--set-property":
+                        try:
+                                prop, p_value = arg.split("=", 1)
+                                p_sec, p_name = prop.split(".", 1)
+                        except ValueError:
+                                usage(_("property arguments must be of "
+                                    "the form '<section.property>="
+                                    "<value>'."), cmd="create-repository")
+                        repo_props.setdefault(p_sec, {})
+                        repo_props[p_sec][p_name] = p_value
 
         try:
-                trans.Transaction(repo_uri, create_repo=True)
+                trans.Transaction(repo_uri, create_repo=True,
+                    repo_props=repo_props)
+        except trans.TransactionRepositoryConfigError, e:
+                error(e, cmd="create-repository")
+                emsg(_("Invalid repository configuration values were "
+                    "specified using --set-property or required values are "
+                    "missing.  Please provide the correct and/or required "
+                    "values using the --set-property option."))
         except trans.TransactionError, e:
                 error(e, cmd="create-repository")
                 return 1
@@ -317,7 +334,7 @@
                                         action.attrs["path"].lstrip("/")
                         # omit set name=fmri actions
                         if action.name == "set" and \
-                            action.attrs["name"] == "fmri":
+                            action.attrs["name"] in ("fmri", "pkg.fmri"):
                                 continue
 
                         t.add(action)
--- a/src/pull.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/pull.py	Fri Oct 23 17:43:37 2009 -0500
@@ -36,15 +36,17 @@
 import urllib
 import urlparse
 
+import pkg.catalog as catalog
 import pkg.client.progress as progress
 import pkg.fmri
 import pkg.manifest as manifest
+import pkg.client.api_errors as api_errors
 import pkg.pkgtarfile as ptf
 import pkg.portable as portable
 import pkg.publish.transaction as trans
-import pkg.server.catalog as catalog
-import pkg.server.config as config
-import pkg.server.repository as repo
+import pkg.search_errors as search_errors
+import pkg.server.catalog as sc
+import pkg.server.repository as sr
 import pkg.server.repositoryconfig as rc
 import pkg.version as version
 
@@ -193,29 +195,25 @@
         parts = urlparse.urlparse(uri, "file", allow_fragments=0)
         path = urllib.url2pathname(parts[2])
 
-        scfg = config.SvrConfig(path, None, None)
-        scfg.set_read_only()
         try:
-                scfg.init_dirs()
-        except (config.SvrConfigError, EnvironmentError), e:
-                raise repo.RepositoryError(_("An error occurred while "
-                    "trying to initialize the repository directory "
-                    "structures:\n%s") % e)
-
-        scfg.acquire_in_flight()
-
-        try:
-                scfg.acquire_catalog()
-        except catalog.CatalogPermissionsException, e:
-                raise repo.RepositoryError(str(e))
-
-        try:
-                repo_cache[uri] = repo.Repository(scfg)
-        except rc.InvalidAttributeValueError, e:
-                raise repo.RepositoryError(_("The specified repository's "
-                    "configuration data is not valid:\n%s") % e)
-
-        return repo_cache[uri]
+                repo = sr.Repository(read_only=True, repo_root=path)
+        except EnvironmentError, _e:
+                error("an error occurred while trying to " \
+                    "initialize the repository directory " \
+                    "structures:\n%s" % _e)
+                sys.exit(1)
+        except sr.RepositoryError, _e:
+                error(_e)
+                sys.exit(1)
+        except rc.PropertyError, _e:
+                error("repository configuration error: %s" % _e)
+                sys.exit(1)
+        except (search_errors.IndexingException,
+            api_errors.PermissionsException), _e:
+                emsg(str(_e), "INDEX")
+                sys.exit(1)
+        repo_cache[uri] = repo
+        return repo
 
 def fetch_manifest(src_uri, pfmri):
         """Return the manifest data for package-fmri 'fmri' from the repository
@@ -225,7 +223,7 @@
                 try:
                         r = get_repo(src_uri)
                         m = file(r.manifest(pfmri), "rb")
-                except (EnvironmentError, repo.RepositoryError), e:
+                except (EnvironmentError, sr.RepositoryError), e:
                         abort(err=e)
         else:
                 # Request manifest from repository.
@@ -274,20 +272,28 @@
         except pkg.fmri.FmriError, e:
                 abort(err=e)
 
-        matches = catalog.extract_matching_fmris(fmri_list,
-            patterns=patterns, constraint=version.CONSTRAINT_AUTO,
-            counthash=counthash, matcher=pkg.fmri.glob_match)
-
-        bail = False
+        # XXX publisher prefixes have to be stripped for catalog matching
+        # for now; awaits v1 client support, etc.
+        pattern_pubs = {}
+        for f in patterns:
+                if f.publisher:
+                        pattern_pubs[f.get_fmri(anarchy=True)] = f.publisher
+                        f.publisher = None
 
-        for f in patterns:
-                if f not in counthash:
-                        emsg(_("No match found for %s") % f.pkg_name)
-                        bail = True
+        matches, unmatched = catalog.extract_matching_fmris(fmri_list,
+            patterns=patterns, constraint=version.CONSTRAINT_AUTO,
+            matcher=pkg.fmri.glob_match)
 
-        if bail:
+        if unmatched:
+                match_err = api_errors.InventoryException(**unmatched)
+                emsg(match_err)
                 abort()
 
+        # XXX restore stripped publisher information.
+        for m in matches:
+                pub = pattern_pubs.pop(str(m), None)
+                if pub:
+                        m.publisher = pub
         return matches
 
 def get_dependencies(src_uri, fmri_list, basedir, tracker):
@@ -386,7 +392,7 @@
         if src_uri.startswith("file://"):
                 try:
                         r = get_repo(src_uri)
-                except repo.RepositoryError, e:
+                except sr.RepositoryError, e:
                         abort(err=e)
 
                 for h in cshashes.keys():
@@ -405,8 +411,7 @@
                                         outfile = open(dest, "wb")
                                         gunzip_from_stream(src, outfile)
                                         outfile.close()
-                        except (EnvironmentError,
-                            repo.RepositoryError), e:
+                        except (EnvironmentError, sr.RepositoryError), e:
                                 try:
                                         portable.remove(dest)
                                 except:
@@ -509,8 +514,8 @@
         if src_uri.startswith("file://"):
                 try:
                         r = get_repo(src_uri)
-                        c = r.catalog()
-                except repo.RepositoryError, e:
+                        c = r.catalog_0()
+                except sr.RepositoryError, e:
                         error(e)
                         abort()
         else:
@@ -527,14 +532,14 @@
 
         # Call catalog.recv to retrieve catalog.
         try:
-                catalog.ServerCatalog.recv(c, cat_dir)
+                sc.ServerCatalog.recv(c, cat_dir)
         except Exception, e:
                 abort(err=_("Error: %s while reading from: %s") % (e, src_uri))
 
         if hasattr(c, "close"):
                 c.close()
 
-        cat = catalog.ServerCatalog(cat_dir, read_only=True)
+        cat = sc.ServerCatalog(cat_dir, read_only=True)
 
         d = {}
         fmri_list = []
@@ -605,12 +610,6 @@
         if pargs == None or len(pargs) == 0:
                 usage(_("must specify at least one pkgfmri"))
 
-        all_fmris = fetch_catalog(src_uri, tracker)
-        fmri_arguments = pargs
-        fmri_list = prune(list(set(expand_matching_fmris(all_fmris,
-            fmri_arguments))), all_versions, all_timestamps)
-
-        create_repo = False
         defer_refresh = False
         republish = False
 
@@ -623,14 +622,30 @@
 
                 # Files have to be decompressed for republishing.
                 keep_compressed = False
-
-                # Automatically create repository at target location if it
-                # doesn't exist.
                 if target.startswith("file://"):
-                        create_repo = True
                         # For efficiency, and publishing speed, don't update
                         # indexes until all file publishing is finished.
                         defer_refresh = True
+
+                        # Check to see if the repository exists first.
+                        try:
+                                t = trans.Transaction(target)
+                        except trans.TransactionRepositoryInvalidError, e:
+                                txt = str(e) + "\n\n"
+                                txt += _("To create a repository, use the "
+                                    "pkgsend command.")
+                                abort(err=txt)
+                        except trans.TransactionRepositoryConfigError, e:
+                                txt = str(e) + "\n\n"
+                                txt += _("The repository configuration for "
+                                    "the repository located at '%s' is not "
+                                    "valid or the specified path does not "
+                                    "exist.  Please correct the configuration "
+                                    "of the repository or create a new "
+                                    "one.") % target
+                                abort(err=txt)
+                        except trans.TransactionError, e:
+                                abort(err=e)
         else:
                 basedir = target
                 if not os.path.exists(basedir):
@@ -641,6 +656,11 @@
                                     basedir)
                                 return 1
 
+        all_fmris = fetch_catalog(src_uri, tracker)
+        fmri_arguments = pargs
+        fmri_list = prune(list(set(expand_matching_fmris(all_fmris,
+            fmri_arguments))), all_versions, all_timestamps)
+
         if recursive:
                 msg(_("Retrieving manifests for dependency evaluation ..."))
                 tracker.evaluate_start()
@@ -721,8 +741,8 @@
                 trans_id = get_basename(f)
 
                 try:
-                        t = trans.Transaction(target, create_repo=create_repo,
-                            pkg_name=pkg_name, trans_id=trans_id)
+                        t = trans.Transaction(target, pkg_name=pkg_name,
+                            trans_id=trans_id)
 
                         # Remove any previous failed attempt to
                         # to republish this package.
@@ -735,7 +755,8 @@
                         t.open()
                         for a in m.gen_actions():
                                 if a.name == "set" and \
-                                    a.attrs.get("name", "") == "fmri":
+                                    a.attrs.get("name", "") in ("fmri",
+                                    "pkg.fmri"):
                                         # To be consistent with the server,
                                         # the fmri can't be added to the
                                         # manifest.
--- a/src/tests/api/t_catalog.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/api/t_catalog.py	Fri Oct 23 17:43:37 2009 -0500
@@ -667,22 +667,35 @@
                 # Populate the catalog and then verify the Manifest signatures
                 # for each entry are correct.
                 cat.add_package(p1_fmri, manifest=p1_man)
-                entry = cat.get_entry(p1_fmri)
                 sigs = p1_man.signatures
-                for k, v in sigs.iteritems():
-                        self.assertEqual(entry["signature-%s" % k], sigs[k])
+                cat_sigs = dict(
+                    (s, v)
+                    for s, v in cat.get_entry_signatures(p1_fmri)
+                )
+                self.assertEqual(sigs, cat_sigs)
 
                 cat.add_package(p2_fmri, manifest=p2_man)
-                entry = cat.get_entry(p2_fmri)
                 sigs = p2_man.signatures
-                for k, v in sigs.iteritems():
-                        self.assertEqual(entry["signature-%s" % k], sigs[k])
+                cat_sigs = dict(
+                    (s, v)
+                    for s, v in cat.get_entry_signatures(p2_fmri)
+                )
+                self.assertEqual(sigs, cat_sigs)
 
                 cat.add_package(p3_fmri, manifest=p3_man)
-                entry = cat.get_entry(p3_fmri)
                 sigs = p3_man.signatures
-                for k, v in sigs.iteritems():
-                        self.assertEqual(entry["signature-%s" % k], sigs[k])
+                cat_sigs = dict(
+                    (s, v)
+                    for s, v in cat.get_entry_signatures(p3_fmri)
+                )
+                self.assertEqual(sigs, cat_sigs)
+
+                # Next, verify that removal of an FMRI not in the catalog will
+                # raise the expected exception.  Do this by removing an FMRI
+                # and then attempting to remove it again.
+                cat.remove_package(p3_fmri)
+                self.assertRaises(api_errors.UnknownCatalogEntry,
+                        cat.remove_package, p3_fmri)
 
         def test_07_updates(self):
                 """Verify that catalog updates are applied as expected."""
@@ -698,6 +711,9 @@
                 dup1 = catalog.Catalog(meta_root=dup1_path)
                 dup1.validate()
 
+                # No catalog updates should be needed.
+                self.assertEqual(dup1.get_updates_needed(orig.meta_root), None)
+
                 # Add some packages to the original.
                 pkg_src_list = [
                     fmri.PkgFmri("pkg://opensolaris.org/"
@@ -712,16 +728,34 @@
                 self.assertEqual(orig.package_version_count, 1)
                 self.assertEqual(dup1.package_version_count, 0)
 
-                # Since no catalog parts exist for the duplicate, there should
-                # be no updates needed.
+                # Only the new catalog parts should be listed as updates.
                 updates = dup1.get_updates_needed(orig.meta_root)
-                self.assertEqual(updates, [])
+                self.assertEqual(updates, set(["catalog.base.C"]))
 
                 # Now copy the existing catalog so that a baseline exists for
                 # incremental update testing.
                 shutil.rmtree(dup1_path)
                 shutil.copytree(cpath, dup1_path)
 
+                def apply_updates(src, dest):
+                        # Next, determine the updates that could be made to the
+                        # duplicate based on the original.
+                        updates = dest.get_updates_needed(src.meta_root)
+
+                        # Verify that the updates available to the original
+                        # catalog are the same as the updated needed to update
+                        # the duplicate.
+                        self.assertEqual(src.updates.keys(), updates)
+
+                        # Apply original catalog's updates to the duplicate.
+                        dest.apply_updates(src.meta_root)
+
+                        # Verify the contents.
+                        self.assertEqual(dest.package_version_count,
+                            src.package_version_count)
+                        self.assertEqual([f for f in dest.fmris()],
+                            [f for f in src.fmris()])
+
                 # Add some packages to the original.
                 pkg_src_list = [
                     fmri.PkgFmri("pkg://opensolaris.org/"
@@ -734,24 +768,46 @@
                         orig.add_package(f)
                 orig.save()
 
-                # Next, determine the updates that could be made to the
-                # duplicate based on the original.
+                # Load the duplicate and ensure it contains the expected data.
                 dup1 = catalog.Catalog(meta_root=dup1_path)
                 self.assertEqual(dup1.package_version_count, 1)
                 dup1.validate()
-                updates = dup1.get_updates_needed(orig.meta_root)
+
+                # Apply the updates and verify.
+                apply_updates(orig, dup1)
 
-                # Verify that the updates available to the original catalog
-                # are the same as the updated needed to update the duplicate.
-                self.assertEqual(orig.updates.keys(), updates)
+                # Now remove the packages that were added during the last
+                # update.
+                for f in pkg_src_list:
+                        orig.remove_package(f)
+                orig.save()
+
+                # Apply the updates and verify.
+                self.assertEqual(orig.package_version_count, 1)
+                apply_updates(orig, dup1)
 
-                # Apply original catalog's updates to the duplicate.
-                dup1.apply_updates(orig.meta_root)
+                # Now add back one of the packages removed.
+                for f in pkg_src_list:
+                        orig.add_package(f)
+                        break
+                orig.save()
+
+                # Apply the updates and verify.
+                self.assertEqual(orig.package_version_count, 2)
+                apply_updates(orig, dup1)
 
-                # Verify the contents.
-                self.assertEqual(dup1.package_version_count, 3)
-                self.assertEqual([f for f in dup1.fmris()],
-                    [f for f in orig.fmris()])
+                # Now remove the package we just added and add back both
+                # packages we first removed and attempt to update.
+                for f in pkg_src_list:
+                        orig.remove_package(f)
+                        break
+                for f in pkg_src_list:
+                        orig.add_package(f)
+                orig.save()
+
+                # Apply the updates and verify.
+                self.assertEqual(orig.package_version_count, 3)
+                apply_updates(orig, dup1)
 
         def test_08_append(self):
                 """Verify that append functionality works as expected."""
@@ -845,14 +901,17 @@
                         "[email protected],5.11-1:20000101T120040Z"),
                 ]
 
-                def ret_man(f):
+                def manifest_cb(cat, f):
                         if f.pkg_name == "apkg":
                                 return manifest.Manifest()
                         return self.__gen_manifest(f)
 
+                def ret_man(f):
+                        return manifest_cb(None, f)
+
                 # First, create a catalog (with callback) and populate it
                 # using only FMRIs.
-                nc = catalog.Catalog(manifest_cb=ret_man)
+                nc = catalog.Catalog(manifest_cb=manifest_cb)
                 for f in pkg_src_list:
                         nc.add_package(f)
                 self.__test_catalog_actions(nc, pkg_src_list)
@@ -866,7 +925,7 @@
 
                 # Third, create a catalog (with callback), but populate it
                 # using FMRIs and Manifests.
-                nc = catalog.Catalog(manifest_cb=ret_man)
+                nc = catalog.Catalog(manifest_cb=manifest_cb)
                 for f in pkg_src_list:
                         nc.add_package(f, manifest=ret_man(f))
                 self.__test_catalog_actions(nc, pkg_src_list)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/api/t_client.py	Fri Oct 23 17:43:37 2009 -0500
@@ -0,0 +1,102 @@
+#!/usr/bin/python2.4
+# -*- coding: utf-8 -*-
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+
+import logging
+import os
+import sys
+import unittest
+import StringIO
+
+# Set the path so that modules above can be found
+path_to_parent = os.path.join(os.path.dirname(__file__), "..")
+sys.path.insert(0, path_to_parent)
+
+import pkg5unittest
+from pkg.client import global_settings
+logger = global_settings.logger
+
+class _LogFilter(logging.Filter):
+        def __init__(self, max_level=logging.CRITICAL):
+                logging.Filter.__init__(self)
+                self.max_level = max_level
+
+        def filter(self, record):
+                return record.levelno <= self.max_level
+
+class TestSettings(pkg5unittest.Pkg5TestCase):
+
+        def test_logging(self):
+                global_settings.client_name = "TestSettings"
+
+                info_out = StringIO.StringIO()
+                error_out = StringIO.StringIO()
+
+                log_fmt = logging.Formatter()
+
+                # Enforce maximum logging level for informational messages.
+                info_h = logging.StreamHandler(info_out)
+                info_t = _LogFilter(logging.INFO)
+                info_h.addFilter(info_t)
+                info_h.setFormatter(log_fmt)
+                info_h.setLevel(logging.INFO)
+
+                # Log all warnings and above to stderr.
+                error_h = logging.StreamHandler(error_out)
+                error_h.setFormatter(log_fmt)
+                error_h.setLevel(logging.WARNING)
+
+                global_settings.info_log_handler = info_h
+                global_settings.error_log_handler = error_h
+
+                # Log some messages.
+                logger.debug("DEBUG")
+                logger.info("INFO")
+                logger.warning("WARNING")
+                logger.error("ERROR")
+                logger.critical("CRITICAL")
+
+                # Now verify that the expected output was received (DEBUG
+                # shouldn't be here due to log level).
+                self.assertEqual(info_out.getvalue(), "INFO\n")
+                self.assertEqual(error_out.getvalue(),
+                    "WARNING\nERROR\nCRITICAL\n")
+
+                # DEBUG should now be present in the info output.
+                info_out.truncate(0)
+                info_h.setLevel(logging.DEBUG)
+                logger.debug("DEBUG")
+                self.assertEqual(info_out.getvalue(), "DEBUG\n")
+
+                # Reset logging and verify info_out, error_out are no longer
+                # set to receive messages.
+                global_settings.reset_logging()
+                self.assertNotEqual(global_settings.info_log_handler, info_h)
+                self.assertNotEqual(global_settings.error_log_handler, error_h)
+
+                logging.shutdown()
+
+if __name__ == "__main__":
+        unittest.main()
--- a/src/tests/api/t_repositoryconfig.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/api/t_repositoryconfig.py	Fri Oct 23 17:43:37 2009 -0500
@@ -39,20 +39,20 @@
 class TestRepositoryConfig(pkg5unittest.Pkg5TestCase):
         """Class to test the functionality of RepositoryConfig.
         """
-        __attrs = {
+        __props = {
             "publisher": {
                 "alias": {
-                    "type": rcfg.ATTR_TYPE_PUB_ALIAS,
+                    "type": rcfg.PROP_TYPE_PUB_ALIAS,
                     "default": "pending"
                 },
                 "prefix": {
-                    "type": rcfg.ATTR_TYPE_PUB_PREFIX,
+                    "type": rcfg.PROP_TYPE_PUB_PREFIX,
                     "default": "org.opensolaris.pending"
                 }
             },
             "repository": {
                 "collection_type": {
-                    "type": rcfg.ATTR_TYPE_REPO_COLL_TYPE,
+                    "type": rcfg.PROP_TYPE_REPO_COLL_TYPE,
                     "default": "supplemental"
                 },
                 "description": {
@@ -63,12 +63,12 @@
                         """contrib repository</a>."""
                 },
                 "detailed_url": {
-                    "type": rcfg.ATTR_TYPE_URI,
+                    "type": rcfg.PROP_TYPE_URI,
                     "default":
                         "http://opensolaris.org/os/community/sw-porters/contributing/",
                 },
                 "legal_uris": {
-                    "type": rcfg.ATTR_TYPE_URI_LIST,
+                    "type": rcfg.PROP_TYPE_URI_LIST,
                     "default": [
                         "http://www.opensolaris.org/os/copyrights/",
                         "http://www.opensolaris.org/os/tou/",
@@ -80,31 +80,31 @@
                         "Software Porters <[email protected]>"
                 },
                 "maintainer_url": {
-                    "type": rcfg.ATTR_TYPE_URI,
+                    "type": rcfg.PROP_TYPE_URI,
                     "default":
                         "http://www.opensolaris.org/os/community/sw-porters/"
                 },
                 "mirrors": {
-                    "type": rcfg.ATTR_TYPE_URI_LIST,
+                    "type": rcfg.PROP_TYPE_URI_LIST,
                     "default": []
                 },
                 "name": {
                     "default": """"Pending" Repository"""
                 },
                 "origins": {
-                    "type": rcfg.ATTR_TYPE_URI_LIST,
+                    "type": rcfg.PROP_TYPE_URI_LIST,
                     "default": ["http://pkg.opensolaris.org/pending"]
                 },
                 "refresh_seconds": {
-                    "type": rcfg.ATTR_TYPE_INT,
+                    "type": rcfg.PROP_TYPE_INT,
                     "default": 86400,
                 },
                 "registration_uri": {
-                    "type": rcfg.ATTR_TYPE_URI,
+                    "type": rcfg.PROP_TYPE_URI,
                     "default": "",
                 },
                 "related_uris": {
-                    "type": rcfg.ATTR_TYPE_URI_LIST,
+                    "type": rcfg.PROP_TYPE_URI_LIST,
                     "default": [
                         "http://pkg.opensolaris.org/contrib",
                         "http://jucr.opensolaris.org/pending",
@@ -114,7 +114,7 @@
             },
             "feed": {
                 "id": {
-                    "type": rcfg.ATTR_TYPE_UUID,
+                    "type": rcfg.PROP_TYPE_UUID,
                     "readonly": True,
                     "default": "16fd2706-8baf-433b-82eb-8c7fada847da"
                 },
@@ -132,13 +132,13 @@
                     "default": "pkg-block-logo.png"
                 },
                 "window": {
-                    "type": rcfg.ATTR_TYPE_INT,
+                    "type": rcfg.PROP_TYPE_INT,
                     "default": 24
                 },
-                # This attribute is only present in this test program so that
-                # boolean attributes can be tested.
+                # This property is only present in this test program so that
+                # boolean properties can be tested.
                 "enabled": {
-                    "type": rcfg.ATTR_TYPE_BOOL,
+                    "type": rcfg.PROP_TYPE_BOOL,
                     "default": True
                 }
             }
@@ -150,39 +150,42 @@
                 fd, self.sample_conf = tempfile.mkstemp()
                 f = os.fdopen(fd, "w")
 
-                # Merge any test attributes into RepositoryConfig's normal
-                # set so that we can test additional attribute data types.
-                attrs = self.__attrs
-                cattrs = rcfg.RepositoryConfig._attrs
-                for section in attrs:
-                        if section not in cattrs:
-                                cattrs[section] = copy.deepcopy(attrs[section])
+                self.remove = [self.sample_conf]
+
+                # Merge any test properties into RepositoryConfig's normal
+                # set so that we can test additional property data types.
+                props = self.__props
+                cprops = rcfg.RepositoryConfig._props
+                for section in props:
+                        if section not in cprops:
+                                cprops[section] = copy.deepcopy(props[section])
                                 continue
 
-                        for attr in attrs[section]:
-                                if attr not in cattrs[section]:
-                                        cattrs[section][attr] = copy.deepcopy(
-                                            attrs[section][attr])
+                        for prop in props[section]:
+                                if prop not in cprops[section]:
+                                        cprops[section][prop] = copy.deepcopy(
+                                            props[section][prop])
 
                 # Write out a sample configuration in ConfigParser format.
                 rc = rcfg.RepositoryConfig()
-                attrs = self.__attrs
-                for section in attrs:
+                props = self.__props
+                for section in props:
                         f.write("[%s]\n" % section)
-                        for attr in attrs[section]:
-                                atype = rc.get_attribute_type(section, attr)
-                                val = attrs[section][attr]["default"]
-                                if atype == rcfg.ATTR_TYPE_URI_LIST:
+                        for prop in props[section]:
+                                atype = rc.get_property_type(section, prop)
+                                val = props[section][prop]["default"]
+                                if atype == rcfg.PROP_TYPE_URI_LIST:
                                         val = ",".join(val)
-                                f.write("%s = %s\n" % (attr, val))
+                                f.write("%s = %s\n" % (prop, val))
                         f.write("\n")
                 f.close()
 
         def tearDown(self):
                 """Cleanup after our tests.
                 """
-                if os.path.exists(self.sample_conf):
-                        os.remove(self.sample_conf)
+                for f in self.remove:
+                        if os.path.exists(f):
+                                os.remove(f)
 
         def test_init(self):
                 """Verify that RepositoryConfig init accepts a pathname and
@@ -212,328 +215,336 @@
                 rc.read(self.sample_conf)
                 rc.write(self.sample_conf)
 
-        def test_get_attribute(self):
-                """Verify that each attribute's value in sample_conf matches
+        def test_get_property(self):
+                """Verify that each property's value in sample_conf matches
                 what we retrieved.
                 """
                 rc = rcfg.RepositoryConfig(pathname=self.sample_conf)
 
-                attrs = self.__attrs
-                for section in attrs:
-                        for attr in attrs[section]:
-                                returned = rc.get_attribute(section, attr)
+                props = self.__props
+                for section in props:
+                        for prop in props[section]:
+                                returned = rc.get_property(section, prop)
                                 self.assertEqual(returned,
-                                    attrs[section][attr]["default"])
+                                    props[section][prop]["default"])
 
-        def test_get_invalid_attribute(self):
-                """Verify that attempting to retrieve an invalid attribute will
-                result in an InvalidAttributeError exception.
+        def test_get_invalid_property(self):
+                """Verify that attempting to retrieve an invalid property will
+                result in an InvalidPropertyError exception.
                 """
                 rc = rcfg.RepositoryConfig()
-                self.assertRaises(rcfg.InvalidAttributeError, rc.get_attribute,
+                self.assertRaises(rcfg.InvalidPropertyError, rc.get_property,
                     "repository", "foo")
 
-        def test_get_attribute_type(self):
-                """Verify that each attribute's type matches the\
+        def test_get_property_type(self):
+                """Verify that each property's type matches the\
                 default object state.
                 """
                 rc = rcfg.RepositoryConfig()
-                attrs = self.__attrs
-                for section in attrs:
-                        for attr in attrs[section]:
-                                returned = rc.get_attribute_type(section, attr)
-                                expected = attrs[section][attr].get("type",
-                                    rcfg.ATTR_TYPE_STR)
+                props = self.__props
+                for section in props:
+                        for prop in props[section]:
+                                returned = rc.get_property_type(section, prop)
+                                expected = props[section][prop].get("type",
+                                    rcfg.PROP_TYPE_STR)
                                 try:
                                         self.assertEqual(returned, expected)
                                 except Exception, e:
                                         raise RuntimeError("An unexpected "
-                                            "attribute type was returned for "
-                                            "attribute '%s': '%s'")
+                                            "property type was returned for "
+                                            "property '%s': '%s'")
 
-        def test_get_attributes(self):
-                """Verify that all expected attributes were returned by
-                get_attributes and that each attribute returned can have its
+        def test_get_properties(self):
+                """Verify that all expected properties were returned by
+                get_properties and that each property returned can have its
                 value retrieved.
                 """
                 rc = rcfg.RepositoryConfig()
-                attrs = rc.get_attributes()
-                self.assertEqual(len(attrs), len(self.__attrs))
-                for section in attrs:
-                        self.assertEqual(len(attrs[section]),
-                            len(self.__attrs[section]))
-                        for attr in attrs[section]:
-                                rc.get_attribute(section, attr)
+                props = rc.get_properties()
+                self.assertEqual(len(props), len(self.__props))
+                for section in props:
+                        self.assertEqual(len(props[section]),
+                            len(self.__props[section]))
+                        for prop in props[section]:
+                                rc.get_property(section, prop)
 
-        def test_set_attribute(self):
-                """Verify that each attribute can be set (unless read-only) and
+        def test_set_property(self):
+                """Verify that each property can be set (unless read-only) and
                 that the set value matches what we expect both before and after
                 write().  Calling set for a read-only value should raise a
                 ValueError exception.
                 """
                 fd, sample_conf = tempfile.mkstemp()
+                self.remove.append(sample_conf)
                 rc = rcfg.RepositoryConfig()
-                attrs = self.__attrs
-                for section in attrs:
-                        for attr in attrs[section]:
-                                value = attrs[section][attr]["default"]
-                                readonly = attrs[section][attr].get("readonly",
+                props = self.__props
+                for section in props:
+                        for prop in props[section]:
+                                value = props[section][prop]["default"]
+                                readonly = props[section][prop].get("readonly",
                                     False)
                                 if readonly:
                                         self.assertRaises(
-                                            rcfg.ReadOnlyAttributeError,
-                                            rc.set_attribute, section, attr,
+                                            rcfg.ReadOnlyPropertyError,
+                                            rc.set_property, section, prop,
                                             value)
-                                        rc._set_attribute(section, attr, value)
+                                        rc._set_property(section, prop, value)
                                 else:
-                                        rc.set_attribute(section, attr, value)
+                                        rc.set_property(section, prop, value)
 
-                                returned = rc.get_attribute(section, attr)
+                                returned = rc.get_property(section, prop)
                                 self.assertEqual(returned, value)
 
                 rc.write(sample_conf)
+                os.close(fd)
 
                 rc = rcfg.RepositoryConfig(pathname=sample_conf)
-                for section in attrs:
-                        for attr in attrs[section]:
-                                value = attrs[section][attr]["default"]
-                                returned = rc.get_attribute(section, attr)
+                for section in props:
+                        for prop in props[section]:
+                                value = props[section][prop]["default"]
+                                returned = rc.get_property(section, prop)
                                 self.assertEqual(returned, value)
 
-        def test_set_invalid_attribute(self):
-                """Verify that attempting to set an invalid attribute will
-                result in an InvalidAttributeError exception.
+        def test_set_invalid_property(self):
+                """Verify that attempting to set an invalid property will
+                result in an InvalidPropertyError exception.
                 """
                 rc = rcfg.RepositoryConfig()
-                # Verify an exception is raised for an invalid attribute.
-                self.assertRaises(rcfg.InvalidAttributeError, rc.set_attribute,
+                # Verify an exception is raised for an invalid property.
+                self.assertRaises(rcfg.InvalidPropertyError, rc.set_property,
                     "repository", "foo", "baz")
 
                 # Verify that an exception is raised for an invalid section.
-                self.assertRaises(rcfg.InvalidAttributeError, rc.set_attribute,
+                self.assertRaises(rcfg.InvalidPropertyError, rc.set_property,
                     "bar", "id", None)
 
-        def test__set_invalid_attribute(self):
-                """Verify that attempting to _set an invalid attribute will
-                result in an InvalidAttributeError exception.
+        def test__set_invalid_property(self):
+                """Verify that attempting to _set an invalid property will
+                result in an InvalidPropertyError exception.
                 """
                 rc = rcfg.RepositoryConfig()
-                # Verify that it happens for an invalid attribute.
-                self.assertRaises(rcfg.InvalidAttributeError,
-                    rc.set_attribute, "repository", "foo", "bar")
+                # Verify that it happens for an invalid property.
+                self.assertRaises(rcfg.InvalidPropertyError,
+                    rc.set_property, "repository", "foo", "bar")
 
                 # Verify that it happens for an invalid section.
-                self.assertRaises(rcfg.InvalidAttributeError,
-                    rc._set_attribute, "bar", "id", "baz")
+                self.assertRaises(rcfg.InvalidPropertyError,
+                    rc._set_property, "bar", "id", "baz")
 
-        def test_is_valid_attribute(self):
-                """Verify that is_valid_attribute returns a boolean value
-                indicating the validity of the attribute or raises an
-                exception if raise_error=True and the attribute is
+        def test_is_valid_property(self):
+                """Verify that is_valid_property returns a boolean value
+                indicating the validity of the property or raises an
+                exception if raise_error=True and the property is
                 invalid.
                 """
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for an invalid attribute.
-                self.assertFalse(rc.is_valid_attribute("repository", "foo"))
+                # Verify that False is returned for an invalid property.
+                self.assertFalse(rc.is_valid_property("repository", "foo"))
 
-                # Verify that False is returned for an invalid attribute
+                # Verify that False is returned for an invalid property
                 # section.
-                self.assertFalse(rc.is_valid_attribute("bar", "foo"))
+                self.assertFalse(rc.is_valid_property("bar", "foo"))
 
-                # Verify that True is returned for a valid attribute.
-                self.assertTrue(rc.is_valid_attribute("feed", "id"))
+                # Verify that True is returned for a valid property.
+                self.assertTrue(rc.is_valid_property("feed", "id"))
 
-                # Verify that an exception is raised for an invalid attribute.
-                self.assertRaises(rcfg.InvalidAttributeError,
-                    rc.is_valid_attribute, "repository", "foo",
+                # Verify that an exception is raised for an invalid property.
+                self.assertRaises(rcfg.InvalidPropertyError,
+                    rc.is_valid_property, "repository", "foo",
                     raise_error=True)
 
-                # Verify that an exception is raised for an invalid attribute
+                # Verify that an exception is raised for an invalid property
                 # section.
-                self.assertRaises(rcfg.InvalidAttributeError,
-                    rc.is_valid_attribute, "bar", "foo", raise_error=True)
+                self.assertRaises(rcfg.InvalidPropertyError,
+                    rc.is_valid_property, "bar", "foo", raise_error=True)
 
-        def test_is_valid_attribute_value(self):
-                """Verify that is_valid_attribute_value returns a boolean value
-                indicating the validity of the attribute value or raises an
-                exception if raise_error=True and the attribute value is
+        def test_is_valid_property_value(self):
+                """Verify that is_valid_property_value returns a boolean value
+                indicating the validity of the property value or raises an
+                exception if raise_error=True and the property value is
                 invalid.
                 """
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for an invalid attribute value.
-                self.assertFalse(rc.is_valid_attribute_value("feed", "window",
+                # Verify that False is returned for an invalid property value.
+                self.assertFalse(rc.is_valid_property_value("feed", "window",
                     "foo"))
 
-                # Verify that True is returned for a valid attribute value.
-                self.assertTrue(rc.is_valid_attribute_value("feed", "window",
+                # Verify that True is returned for a valid property value.
+                self.assertTrue(rc.is_valid_property_value("feed", "window",
                     24))
 
-                # Verify that an exception is raised for an invalid attribute
+                # Verify that an exception is raised for an invalid property
                 # value when raise_error=True.
-                self.assertRaises(rcfg.InvalidAttributeValueError,
-                    rc.is_valid_attribute_value, "feed", "window", "foo",
+                self.assertRaises(rcfg.InvalidPropertyValueError,
+                    rc.is_valid_property_value, "feed", "window", "foo",
                     raise_error=True)
 
-        def test_is_valid_attribute_value_uuid(self):
-                """Verify that is_valid_attribute_value returns the expected
-                boolean value indicating the validity of UUID attribute values.
+        def test_is_valid_property_value_uuid(self):
+                """Verify that is_valid_property_value returns the expected
+                boolean value indicating the validity of UUID property values.
                 """
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for an invalid attribute value.
-                self.assertFalse(rc.is_valid_attribute_value("feed", "id",
+                # Verify that False is returned for an invalid property value.
+                self.assertFalse(rc.is_valid_property_value("feed", "id",
                     "8baf-433b-82eb-8c7fada847da"))
 
                 # Verify that an exception is raised when raise_error=True for
-                # an invalid attribute value.
-                self.assertRaises(rcfg.InvalidAttributeValueError,
-                    rc.is_valid_attribute_value, "feed", "id",
+                # an invalid property value.
+                self.assertRaises(rcfg.InvalidPropertyValueError,
+                    rc.is_valid_property_value, "feed", "id",
                     "8baf-433b-82eb-8c7fada847da", raise_error=True)
 
-                # Verify that True is returned for a valid attribute value.
-                self.assertTrue(rc.is_valid_attribute_value("feed", "id",
+                # Verify that True is returned for a valid property value.
+                self.assertTrue(rc.is_valid_property_value("feed", "id",
                     "16fd2706-8baf-433b-82eb-8c7fada847da"))
 
-        def test_is_valid_attribute_value_bool(self):
-                """Verify that is_valid_attribute_value returns the expected
-                boolean value indicating the validity of bool attribute values.
+        def test_is_valid_property_value_bool(self):
+                """Verify that is_valid_property_value returns the expected
+                boolean value indicating the validity of bool property values.
                 """
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for invalid attribute values.
-                self.assertFalse(rc.is_valid_attribute_value("feed",
+                # Verify that False is returned for invalid property values.
+                self.assertFalse(rc.is_valid_property_value("feed",
                     "enabled", "foo"))
-                self.assertFalse(rc.is_valid_attribute_value("feed",
+                self.assertFalse(rc.is_valid_property_value("feed",
                     "enabled", "1"))
-                self.assertFalse(rc.is_valid_attribute_value("feed",
+                self.assertFalse(rc.is_valid_property_value("feed",
                     "enabled", "0"))
-                self.assertFalse(rc.is_valid_attribute_value("feed",
+                self.assertFalse(rc.is_valid_property_value("feed",
                     "enabled", "true"))
-                self.assertFalse(rc.is_valid_attribute_value("feed",
+                self.assertFalse(rc.is_valid_property_value("feed",
                     "enabled", "false"))
-                self.assertFalse(rc.is_valid_attribute_value("feed",
+                self.assertFalse(rc.is_valid_property_value("feed",
                     "enabled", ""))
 
                 # Verify that an exception is raised when raise_error=True for
-                # an invalid attribute value.
-                self.assertRaises(rcfg.InvalidAttributeValueError,
-                    rc.is_valid_attribute_value, "feed", "enabled", "",
+                # a missing property value.
+                self.assertRaises(rcfg.RequiredPropertyValueError,
+                    rc.is_valid_property_value, "feed", "enabled", "",
+                    raise_error=True)
+
+                # Verify that an exception is raised when raise_error=True for
+                # an invalid property value.
+                self.assertRaises(rcfg.InvalidPropertyValueError,
+                    rc.is_valid_property_value, "feed", "id", "mumble",
                     raise_error=True)
 
-                # Verify that True is returned for valid attribute values.
-                self.assertTrue(rc.is_valid_attribute_value("feed",
+                # Verify that True is returned for valid property values.
+                self.assertTrue(rc.is_valid_property_value("feed",
                     "enabled", "True"))
-                self.assertTrue(rc.is_valid_attribute_value("feed",
+                self.assertTrue(rc.is_valid_property_value("feed",
                     "enabled", True))
-                self.assertTrue(rc.is_valid_attribute_value("feed",
+                self.assertTrue(rc.is_valid_property_value("feed",
                     "enabled", "False"))
-                self.assertTrue(rc.is_valid_attribute_value("feed",
+                self.assertTrue(rc.is_valid_property_value("feed",
                     "enabled", False))
 
-        def test_is_valid_attribute_value_uri(self):
-                """Verify that is_valid_attribute_value returns the expected
-                boolean value indicating the validity of uri attribute values.
+        def test_is_valid_property_value_uri(self):
+                """Verify that is_valid_property_value returns the expected
+                boolean value indicating the validity of uri property values.
                 """
 
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for an invalid attribute value.
-                self.assertFalse(rc.is_valid_attribute_value("repository",
+                # Verify that False is returned for an invalid property value.
+                self.assertFalse(rc.is_valid_property_value("repository",
                     "registration_uri", "abc.123^@#$&)(*&#$)"))
 
                 # Verify that an exception is raised when raise_error=True for
-                # an invalid attribute value.
-                self.assertRaises(rcfg.InvalidAttributeValueError,
-                    rc.is_valid_attribute_value, "repository",
+                # an invalid property value.
+                self.assertRaises(rcfg.InvalidPropertyValueError,
+                    rc.is_valid_property_value, "repository",
                     "registration_uri",
                     "abc.123^@#$&)(*&#$)", raise_error=True)
 
-                # Verify that True is returned for a valid attribute value.
-                self.assertTrue(rc.is_valid_attribute_value("repository",
+                # Verify that True is returned for a valid property value.
+                self.assertTrue(rc.is_valid_property_value("repository",
                     "registration_uri", "https://pkg.sun.com/register"))
 
-        def test_is_valid_attribute_value_uri_list(self):
-                """Verify that is_valid_attribute_value returns the expected
-                boolean value indicating the validity of uri_list attribute
+        def test_is_valid_property_value_uri_list(self):
+                """Verify that is_valid_property_value returns the expected
+                boolean value indicating the validity of uri_list property
                 values.
                 """
 
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for an invalid attribute value.
-                self.assertFalse(rc.is_valid_attribute_value("repository",
+                # Verify that False is returned for an invalid property value.
+                self.assertFalse(rc.is_valid_property_value("repository",
                     "mirrors", "http://example.com/mirror, abc.123^@#$&)(*&#$)"))
-                self.assertFalse(rc.is_valid_attribute_value("repository",
+                self.assertFalse(rc.is_valid_property_value("repository",
                     "mirrors", ","))
 
                 # Verify that an exception is raised when raise_error=True for
-                # an invalid attribute value.
-                self.assertRaises(rcfg.InvalidAttributeValueError,
-                    rc.is_valid_attribute_value, "repository", "mirrors",
+                # an invalid property value.
+                self.assertRaises(rcfg.InvalidPropertyValueError,
+                    rc.is_valid_property_value, "repository", "mirrors",
                     "example.com,example.net", raise_error=True)
 
-                # Verify that True is returned for a valid attribute value.
-                self.assertTrue(rc.is_valid_attribute_value("repository",
+                # Verify that True is returned for a valid property value.
+                self.assertTrue(rc.is_valid_property_value("repository",
                     "mirrors", ["http://example.com/mirror1",
                     "http://example.net/mirror2"]))
 
-        def test_is_valid_attribute_value_pub_alias(self):
-                """Verify that is_valid_attribute_value returns the expected
+        def test_is_valid_property_value_pub_alias(self):
+                """Verify that is_valid_property_value returns the expected
                 boolean value indicating the validity of publisher alias
-                attribute values.
+                property values.
                 """
 
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for an invalid attribute value.
-                self.assertFalse(rc.is_valid_attribute_value("publisher",
+                # Verify that False is returned for an invalid property value.
+                self.assertFalse(rc.is_valid_property_value("publisher",
                     "alias", "abc.123^@#$&)(*&#$)"))
 
                 # Verify that an exception is raised when raise_error=True for
-                # an invalid attribute value.
-                self.assertRaises(rcfg.InvalidAttributeValueError,
-                    rc.is_valid_attribute_value, "publisher", "alias",
+                # an invalid property value.
+                self.assertRaises(rcfg.InvalidPropertyValueError,
+                    rc.is_valid_property_value, "publisher", "alias",
                     "abc.123^@#$&)(*&#$)", raise_error=True)
 
-                # Verify that True is returned for a valid attribute value.
-                self.assertTrue(rc.is_valid_attribute_value("publisher",
+                # Verify that True is returned for a valid property value.
+                self.assertTrue(rc.is_valid_property_value("publisher",
                     "alias", "bobcat"))
 
-        def test_is_valid_attribute_value_pub_prefix(self):
-                """Verify that is_valid_attribute_value returns the expected
+        def test_is_valid_property_value_pub_prefix(self):
+                """Verify that is_valid_property_value returns the expected
                 boolean value indicating the validity of publisher prefix
-                attribute values.
+                property values.
                 """
 
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for an invalid attribute value.
-                self.assertFalse(rc.is_valid_attribute_value("publisher",
+                # Verify that False is returned for an invalid property value.
+                self.assertFalse(rc.is_valid_property_value("publisher",
                     "prefix", "abc.123^@#$&)(*&#$)"))
 
                 # Verify that an exception is raised when raise_error=True for
-                # an invalid attribute value.
-                self.assertRaises(rcfg.InvalidAttributeValueError,
-                    rc.is_valid_attribute_value, "publisher", "prefix",
+                # an invalid property value.
+                self.assertRaises(rcfg.InvalidPropertyValueError,
+                    rc.is_valid_property_value, "publisher", "prefix",
                     "abc.123^@#$&)(*&#$)", raise_error=True)
 
-                # Verify that True is returned for a valid attribute value.
-                self.assertTrue(rc.is_valid_attribute_value("publisher",
+                # Verify that True is returned for a valid property value.
+                self.assertTrue(rc.is_valid_property_value("publisher",
                     "prefix", "xkcd.net"))
 
-        def test_is_valid_attribute_value_repo_coll_type(self):
-                """Verify that is_valid_attribute_value returns the expected
+        def test_is_valid_property_value_repo_coll_type(self):
+                """Verify that is_valid_property_value returns the expected
                 boolean value indicating the validity of repository collection
-                type attribute values.
+                type property values.
                 """
 
                 rc = rcfg.RepositoryConfig()
-                # Verify that False is returned for an invalid attribute value.
-                self.assertFalse(rc.is_valid_attribute_value("repository",
+                # Verify that False is returned for an invalid property value.
+                self.assertFalse(rc.is_valid_property_value("repository",
                     "collection_type", "donotwant"))
 
                 # Verify that an exception is raised when raise_error=True for
-                # an invalid attribute value.
-                self.assertRaises(rcfg.InvalidAttributeValueError,
-                    rc.is_valid_attribute_value, "repository",
+                # an invalid property value.
+                self.assertRaises(rcfg.InvalidPropertyValueError,
+                    rc.is_valid_property_value, "repository",
                     "collection_type", "donotwant", raise_error=True)
 
-                # Verify that True is returned for a valid attribute value.
-                self.assertTrue(rc.is_valid_attribute_value("repository",
+                # Verify that True is returned for a valid property value.
+                self.assertTrue(rc.is_valid_property_value("repository",
                     "collection_type", "supplemental"))
 
         def test_missing_conffile(self):
--- a/src/tests/baseline.txt	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/baseline.txt	Fri Oct 23 17:43:37 2009 -0500
@@ -14,6 +14,7 @@
 api.t_catalog.py TestEmptyCatalog.test_01_attrs|pass
 api.t_catalog.py TestEmptyCatalog.test_02_extract_matching_fmris|pass
 api.t_catalog.py TestEmptyCatalog.test_03_actions|pass
+api.t_client.py TestSettings.test_logging|pass
 api.t_dependencies.py TestDependencyAnalyzer.test_ext_elf|pass
 api.t_dependencies.py TestDependencyAnalyzer.test_ext_hardlink|pass
 api.t_dependencies.py TestDependencyAnalyzer.test_ext_pb|pass
@@ -144,26 +145,26 @@
 api.t_publisher.py TestPublisher.test_01_repository_uri|pass
 api.t_publisher.py TestPublisher.test_02_repository|pass
 api.t_publisher.py TestPublisher.test_03_publisher|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test__set_invalid_attribute|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_get_attribute|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_get_attribute_type|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_get_attributes|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_get_invalid_attribute|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test__set_invalid_property|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_get_invalid_property|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_get_properties|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_get_property|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_get_property_type|pass
 api.t_repositoryconfig.py TestRepositoryConfig.test_init|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute_value|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute_value_bool|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute_value_pub_alias|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute_value_pub_prefix|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute_value_repo_coll_type|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute_value_uri|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute_value_uri_list|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_attribute_value_uuid|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property_value|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property_value_bool|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property_value_pub_alias|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property_value_pub_prefix|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property_value_repo_coll_type|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property_value_uri|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property_value_uri_list|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_is_valid_property_value_uuid|pass
 api.t_repositoryconfig.py TestRepositoryConfig.test_missing_conffile|pass
 api.t_repositoryconfig.py TestRepositoryConfig.test_multi_read_write|pass
 api.t_repositoryconfig.py TestRepositoryConfig.test_read|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_set_attribute|pass
-api.t_repositoryconfig.py TestRepositoryConfig.test_set_invalid_attribute|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_set_invalid_property|pass
+api.t_repositoryconfig.py TestRepositoryConfig.test_set_property|pass
 api.t_repositoryconfig.py TestRepositoryConfig.test_write|pass
 api.t_smf.py TestSMF.test_get_info_1|pass
 api.t_smf.py TestSMF.test_is_smf_manifest1|pass
@@ -312,6 +313,7 @@
 cli.t_pkg_api_install.py TestPkgApiInstall.test_bug_1338_4|pass
 cli.t_pkg_api_install.py TestPkgApiInstall.test_bug_2795|pass
 cli.t_pkg_api_install.py TestPkgApiInstall.test_bug_4109|pass
+cli.t_pkg_api_install.py TestPkgApiInstall.test_catalog_v0|pass
 cli.t_pkg_api_install.py TestPkgApiInstall.test_image_upgrade|pass
 cli.t_pkg_api_install.py TestPkgApiInstall.test_install_matching|error
 cli.t_pkg_api_install.py TestPkgApiInstall.test_nonrecursive_dependent_uninstall|pass
@@ -325,6 +327,7 @@
 cli.t_pkg_depotd.py TestDepotController.testBadArgs|pass
 cli.t_pkg_depotd.py TestDepotController.testStartStop|pass
 cli.t_pkg_depotd.py TestDepotController.test_cfg_file|pass
+cli.t_pkg_depotd.py TestDepotController.test_disable_ops|pass
 cli.t_pkg_depotd.py TestDepotController.test_writable_root|pass
 cli.t_pkg_depotd.py TestDepotOutput.test_0_depot_bui_output|pass
 cli.t_pkg_depotd.py TestDepotOutput.test_1_depot_publisher|pass
@@ -337,7 +340,6 @@
 cli.t_pkg_depotd.py TestPkgDepot.test_bug_4489|pass
 cli.t_pkg_depotd.py TestPkgDepot.test_bug_5366|pass
 cli.t_pkg_depotd.py TestPkgDepot.test_bug_5707|pass
-cli.t_pkg_depotd.py TestPkgDepot.test_bug_8010|pass
 cli.t_pkg_depotd.py TestPkgDepot.test_depot_ping|pass
 cli.t_pkg_depotd.py TestPkgDepot.test_face_root|pass
 cli.t_pkg_depotd.py TestPkgDepot.test_repo_create|pass
@@ -486,6 +488,7 @@
 cli.t_pkg_publisher.py TestPkgPublisherMany.test_enable_disable|pass
 cli.t_pkg_rebuild_index.py TestPkgRebuildIndex.test_rebuild_index_bad_opts|pass
 cli.t_pkg_rebuild_index.py TestPkgRebuildIndex.test_rebuild_index_bad_perms|pass
+cli.t_pkg_refresh.py TestPkgRefreshMulti.test_catalog_v1|pass
 cli.t_pkg_refresh.py TestPkgRefreshMulti.test_general_refresh|pass
 cli.t_pkg_refresh.py TestPkgRefreshMulti.test_refresh_certificate_problems|pass
 cli.t_pkg_refresh.py TestPkgRefreshMulti.test_refresh_cli_options|pass
@@ -505,12 +508,12 @@
 cli.t_pkg_verify.py TestPkgVerify.test_bug_1463|pass
 cli.t_pkg_verify.py TestPkgVerify.test_pkg_verify_bad_opts|pass
 cli.t_pkg_version.py TestPkgVersion.test_pkg_version_bad_opts|pass
+cli.t_pkgdep.py TestPkgdepBasics.test_bug_10518|pass
+cli.t_pkgdep.py TestPkgdepBasics.test_bug_11517|pass
+cli.t_pkgdep.py TestPkgdepBasics.test_bug_11829|pass
 cli.t_pkgdep.py TestPkgdepBasics.test_opts|pass
 cli.t_pkgdep.py TestPkgdepBasics.test_output|pass
 cli.t_pkgdep.py TestPkgdepBasics.test_resolve_screen_out|pass
-cli.t_pkgdep.py TestPkgdepBasics.test_bug_10518|pass
-cli.t_pkgdep.py TestPkgdepBasics.test_bug_11517|pass
-cli.t_pkgdep.py TestPkgdepBasics.test_bug_11829|pass
 cli.t_pkgdep_resolve.py TestApiDependencies.test_bug_11518|pass
 cli.t_pkgdep_resolve.py TestApiDependencies.test_resolve_cross_package|pass
 cli.t_pkgdep_resolve.py TestApiDependencies.test_resolve_mix|pass
@@ -537,8 +540,11 @@
 cli.t_pkgsend.py TestPkgsendBasics.test_7_create_repo|pass
 cli.t_pkgsend.py TestPkgsendBasics.test_8_bug_7908|pass
 cli.t_pkgsend.py TestPkgsendBasics.test_9_multiple_dirs|pass
+cli.t_publish_api.py TestPkgPublicationApi.test_stress_file_publish|pass
+cli.t_publish_api.py TestPkgPublicationApi.test_stress_http_publish|pass
 cli.t_setUp.py TestSetUp.test_first_depot_start|pass
 cli.t_setUp.py TestSetUp.test_second_depot_start|pass
+cli.t_util_merge.py TestUtilMerge.test_0_merge|pass
 cli.t_variants.py TestPkgVariants.test_old_zones_pkgs|pass
 cli.t_variants.py TestPkgVariants.test_variant_1|pass
 gui.t_pm_addrepo.py TestPkgGuiAddRepoBasics.testAddRepository|pass
--- a/src/tests/cli/t_api.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_api.py	Fri Oct 23 17:43:37 2009 -0500
@@ -100,7 +100,7 @@
         misc_files = [ "libc.so.1" ]
 
         def setUp(self):
-                testutils.SingleDepotTestCase.setUp(self)
+                testutils.SingleDepotTestCase.setUp(self, publisher="bobcat")
 
                 self.foo12 = self.foo12.replace("$test_prefix",
                     self.get_test_prefix())
@@ -157,7 +157,7 @@
         def test_bad_orderings(self):
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.foo10)
-                self.image_create(durl)
+                self.image_create(durl, prefix="bobcat")
 
                 progresstracker = progress.NullProgressTracker()
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
@@ -201,7 +201,7 @@
 
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.foo10)
-                self.image_create(durl)
+                self.image_create(durl, prefix="bobcat")
 
                 progresstracker = progress.NullProgressTracker()
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
@@ -274,7 +274,7 @@
                 accessible and return expected values."""
 
                 durl = self.dc.get_depot_url()
-                self.image_create(durl)
+                self.image_create(durl, prefix="bobcat")
 
                 progresstracker = progress.NullProgressTracker()
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
@@ -377,10 +377,12 @@
                 ffoo = fmri.PkgFmri(plist[0])
                 sfoo = str(ffoo).replace(":%s" % ffoo.version.timestr, "")
                 ffoo = fmri.PkgFmri(sfoo)
+                sfoo = ffoo.get_fmri(anarchy=True)
 
                 fbar = fmri.PkgFmri(plist[1])
                 sbar = str(fbar).replace(":%s" % fbar.version.timestr, "")
                 fbar = fmri.PkgFmri(sbar)
+                sbar = fbar.get_fmri(anarchy=True)
 
                 # Build a simple list of packages.
                 pnames = {
--- a/src/tests/cli/t_api_search.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_api_search.py	Fri Oct 23 17:43:37 2009 -0500
@@ -130,7 +130,7 @@
 close """
 
         bogus_pkg10 = """
-set name=fmri value=pkg:/bogus_pkg@1.0,5.11-0:20090326T233451Z
+set name=pkg.fmri value=pkg:/bogus_pkg@1.0,5.11-0:20090326T233451Z
 set name=description value=""validation with simple chains of constraints ""
 set name=pkg.description value="pseudo-hashes as arrays tied to a "type" (list of fields)"
 depend fmri=XML-Atom-Entry
@@ -138,6 +138,13 @@
 """
         bogus_fmri = fmri.PkgFmri("bogus_pkg@1.0,5.11-0:20090326T233451Z")
 
+        remote_fmri_string = ('pkg:/example_pkg@1.0', 'test/example_pkg',
+            'set name=pkg.fmri value=pkg://test/example_pkg@1.0,5.11-0:')
+
+        res_remote_pkg = set([
+            remote_fmri_string
+        ])
+
         res_remote_path = set([
             ("pkg:/example_pkg@1.0", "basename","file 820157a2043e3135f342b238129b556aade20347 chash=bfa46fc98d1ca97f1260090797d35a35e76096a3 group=bin mode=0555 owner=root path=bin/example_path pkg.csize=38 pkg.size=18")
         ])
@@ -185,15 +192,16 @@
         ])
 
         res_remote_wildcard = res_remote_path.union(set([
+            remote_fmri_string,
             ('pkg:/example_pkg@1.0', 'basename', 'dir group=bin mode=0755 owner=root path=bin/example_dir')
         ]))
 
         res_remote_glob = set([
+            remote_fmri_string,
             ('pkg:/example_pkg@1.0', 'path', 'dir group=bin mode=0755 owner=root path=bin/example_dir'),
             ('pkg:/example_pkg@1.0', 'basename', 'dir group=bin mode=0755 owner=root path=bin/example_dir'),
             ('pkg:/example_pkg@1.0', 'path', 'file 820157a2043e3135f342b238129b556aade20347 chash=bfa46fc98d1ca97f1260090797d35a35e76096a3 group=bin mode=0555 owner=root path=bin/example_path pkg.csize=38 pkg.size=18')
         ]) | res_remote_path
-                                
 
         res_remote_foo = set([
             ('pkg:/example_pkg@1.0', 'FOOO', 'set name=description value="FOOO bAr O OO OOO"')
@@ -216,11 +224,11 @@
         ])
 
         res_remote_right_brace = set([
-            ('pkg:/example_pkg@1.0', ']', 'set name=weirdness value="] [ * ?"')    
+            ('pkg:/example_pkg@1.0', ']', 'set name=weirdness value="] [ * ?"')
         ])
-        
+
         local_fmri_string = ('pkg:/example_pkg@1.0', 'test/example_pkg',
-            'set name=fmri value=pkg://test/example_pkg@1.0,5.11-0:')
+            'set name=pkg.fmri value=pkg://test/example_pkg@1.0,5.11-0:')
 
         res_local_pkg = set([
                 local_fmri_string
@@ -248,7 +256,7 @@
         res_local_bar = copy.copy(res_remote_bar)
 
         res_local_openssl = copy.copy(res_remote_openssl)
-        
+
         res_local_path_example11 = set([
             ("pkg:/example_pkg@1.1", "basename", "file 820157a2043e3135f342b238129b556aade20347 chash=bfa46fc98d1ca97f1260090797d35a35e76096a3 group=bin mode=0555 owner=root path=bin/example_path11 pkg.csize=38 pkg.size=18")
         ])
@@ -258,7 +266,7 @@
         ])
 
         res_local_pkg_example11 = set([
-            ("pkg:/example_pkg@1.1", "test/example_pkg", "set name=fmri value=pkg://test/example_pkg@1.1,5.11-0:")
+            ("pkg:/example_pkg@1.1", "test/example_pkg", "set name=pkg.fmri value=pkg://test/example_pkg@1.1,5.11-0:")
         ])
 
         res_local_wildcard_example11 = set([
@@ -297,20 +305,18 @@
             ('pkg:/fat@1.0', 'variant', 'set name=description value="sparc variant" variant.arch=sparc')
         ])
 
-        res_remote_fat10_star = res_fat10_sparc | res_fat10_i386
+        fat_10_fmri_string = set([('pkg:/fat@1.0', 'test/fat', 'set name=pkg.fmri value=pkg://test/fat@1.0,5.11-0:')])
+
+        res_remote_fat10_star = fat_10_fmri_string | res_fat10_sparc | res_fat10_i386
 
-        local_fat_10_fmri_string = set([('pkg:/fat@1.0', 'test/fat', 'set name=fmri value=pkg://test/fat@1.0,5.11-0:')])
-        
         res_local_fat10_i386_star = res_fat10_i386.union(set([
-            ('pkg:/fat@1.0', 'test', 'set name=publisher value=test'),
             ('pkg:/fat@1.0', 'sparc', 'set name=variant.arch value=sparc value=i386')
-        ])).union(local_fat_10_fmri_string)
+        ])).union(fat_10_fmri_string)
 
         res_local_fat10_sparc_star = res_fat10_sparc.union(set([
-            ('pkg:/fat@1.0', 'test', 'set name=publisher value=test'),
             ('pkg:/fat@1.0', 'i386', 'set name=variant.arch value=sparc value=i386')
-        ])).union(local_fat_10_fmri_string)
-        
+        ])).union(fat_10_fmri_string)
+
         res_space_with_star = set([
             ('pkg:/space_pkg@1.0', 'basename', 'file 820157a2043e3135f342b238129b556aade20347 chash=bfa46fc98d1ca97f1260090797d35a35e76096a3 group=sys mode=0444 owner=nobody path="unique/with a space" pkg.csize=38 pkg.size=18')
         ])
@@ -398,7 +404,7 @@
              '820157a2043e3135f342b238129b556aade20347',
              'file 820157a2043e3135f342b238129b556aade20347 chash=bfa46fc98d1ca97f1260090797d35a35e76096a3 group=bin mode=0555 owner=root path=bin/example_path pkg.csize=38 pkg.size=18')
         ]) | res_remote_path
-        
+
         res_remote_url = set([
             ('pkg:/example_pkg@1.0',
             'http://service.opensolaris.com/xml/pkg/SUNWcsu@0.5.11,5.11-1:20080514I120000Z',
@@ -449,8 +455,8 @@
         fast_add_after_second_update = set(["VERSION: 2\n"])
 
         fast_remove_after_second_update = set(["VERSION: 2\n"])
-        
-        debug_features = None
+
+        debug_features = []
 
         def setUp(self):
                 for p in self.misc_files:
@@ -489,11 +495,11 @@
 
         @staticmethod
         def _replace_act(act):
-                if act.startswith('set name=fmri'):
+                if act.startswith('set name=pkg.fmri'):
                         return act.strip().rsplit(":", 1)[0] + ":"
                 else:
                         return act.strip()
-                        
+
         @staticmethod
         def _extract_action_from_res(it):
                 return (
@@ -585,11 +591,8 @@
                 self._check(set(res), test_value)
 
         def _run_full_remote_tests(self, api_obj):
-                # Set to 1 since searches can't currently be performed
-                # package name unless it's set inside the
-                # manifest which happens at install time on
-                # the client side.
-                self._search_op(api_obj, True, "example_pkg", set())
+                self._search_op(api_obj, True, "example_pkg",
+                    self.res_remote_pkg)
                 self._search_op(api_obj, True, "example_path",
                     self.res_remote_path)
                 self._search_op(api_obj, True, "(example_path)",
@@ -683,11 +686,8 @@
                     self._search_op, api_obj, True, "e* OR <e*>", set())
 
         def _run_remote_tests(self, api_obj):
-                # Set to 1 since searches can't currently be performed
-                # package name unless it's set inside the
-                # manifest which happens at install time on
-                # the client side.
-                self._search_op(api_obj, True, "example_pkg", set())
+                self._search_op(api_obj, True, "example_pkg",
+                    self.res_remote_pkg)
                 self._search_op(api_obj, True, "example_path",
                     self.res_remote_path)
                 self._search_op(api_obj, True, "::com.sun.service.info_url:",
@@ -1176,12 +1176,12 @@
         def __init__(self, *args, **kwargs):
                 TestApiSearchBasics.__init__(self, *args, **kwargs)
                 self.sent_pkgs = set()
-        
+
         def pkgsend_bulk(self, durl, pkg, optional=True):
                 if pkg not in self.sent_pkgs or optional == False:
                         self.sent_pkgs.add(pkg)
                         TestApiSearchBasics.pkgsend_bulk(self, durl, pkg)
-        
+
         def test_010_remote(self):
                 """Test remote search."""
                 durl = self.dc.get_depot_url()
@@ -1277,11 +1277,11 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: False, PKG_CLIENT_NAME)
                 self._do_install(api_obj, ["example_pkg"])
-                
+
                 index_dir = os.path.join(self.img_path, "var","pkg","index")
 
                 first = True
-                
+
                 for d in query_parser.TermQuery._global_data_dict.values():
                         orig_fn = d.get_file_name()
                         orig_path = os.path.join(index_dir, orig_fn)
@@ -1311,7 +1311,7 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: False, PKG_CLIENT_NAME)
                 self._do_install(api_obj, ["example_pkg"])
-                
+
                 index_dir = os.path.join(self.img_path, "var","pkg","index")
 
                 first = True
@@ -1431,7 +1431,7 @@
                 """Install one package, and run the search suite."""
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.fat_pkg10)
-                
+
                 self.image_create(durl,
                     additional_args="--variant variant.arch=sparc")
                 progresstracker = progress.NullProgressTracker()
@@ -1590,7 +1590,7 @@
                         index_dir, index_dir_tmp = self._get_index_dirs()
 
                         shutil.copytree(index_dir, index_dir_tmp)
-                
+
                         self._do_install(api_obj, ["example_pkg"])
 
                         f(index_dir, index_dir_tmp)
@@ -1616,7 +1616,7 @@
                         self._do_install(api_obj, ["example_pkg"])
 
                         index_dir, index_dir_tmp = self._get_index_dirs()
-                
+
                         shutil.copytree(index_dir, index_dir_tmp)
 
                         self._do_install(api_obj, ["another_pkg"])
@@ -1628,14 +1628,14 @@
                             self._do_uninstall, api_obj, ["another_pkg"])
 
                         self.image_destroy()
-                
+
         def test_bug_2989_3(self):
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.example_pkg10)
                 self.pkgsend_bulk(durl, self.example_pkg11)
 
                 for f in self._dir_restore_functions:
-                
+
                         self.image_create(durl)
                         progresstracker = progress.NullProgressTracker()
                         api_obj = api.ImageInterface(self.get_img_path(),
@@ -1664,16 +1664,16 @@
                 self.pkgsend_bulk(durl, self.example_pkg11)
 
                 for f in self._dir_restore_functions:
-                
+
                         self.image_create(durl)
                         progresstracker = progress.NullProgressTracker()
                         api_obj = api.ImageInterface(self.get_img_path(),
                             API_VERSION, progresstracker, lambda x: False,
                             PKG_CLIENT_NAME)
                         self._do_install(api_obj, ["another_pkg"])
-                
+
                         index_dir, index_dir_tmp = self._get_index_dirs()
-                        
+
                         shutil.copytree(index_dir, index_dir_tmp)
 
                         self._do_install(api_obj, ["example_pkg@1.0,5.11-0"])
@@ -1796,8 +1796,11 @@
                 main_2 = fh.readlines()
                 new_main_len = len(main_2)
                 fh.close()
-                self.assert_(new_tok_len == tok_len)
-                self.assert_(new_main_len == main_len)
+                # Since the server now adds a set action for the FMRI to
+                # manifests during publication, there should be one
+                # additional line for the token file.
+                self.assertEqual(new_tok_len, tok_len + 1)
+                self.assertEqual(new_main_len, main_len + 1)
 
         def test_bug_983(self):
                 """Test for known bug 983."""
@@ -1845,7 +1848,7 @@
                     query_parser.TermQuery._global_data_dict.values()[0].\
                     get_file_name())
                 dest_fn = orig_fn + "TMP"
-                
+
                 self._do_install(api_obj, ["example_pkg"])
                 api_obj.rebuild_search_index()
 
@@ -1874,7 +1877,7 @@
                 indexer.MAX_ADDED_NUMBER_PACKAGES packages one after another
                 doesn't cause any type of indexing error."""
                 def _remove_extra_info(v):
-                        return v.split("-")[0]                
+                        return v.split("-")[0]
                 durl = self.dc.get_depot_url()
                 pkg_list = []
                 for i in range(0, indexer.MAX_ADDED_NUMBER_PACKAGES + 3):
@@ -1967,7 +1970,7 @@
                         expected_code),
                     urllib2.urlopen, durl + "/search/1/" + q_str)
 
-        def test_bug_9845_03(self): 
+        def test_bug_9845_03(self):
                 """Test that a corrupt return_type value doesn't break the "
                 server."""
                 durl = self.dc.get_depot_url()
@@ -2099,7 +2102,7 @@
                     urllib2.urlopen, durl + "/search/1/" + q_str)
 
 
-class TestApiSearchBasicsRestartingDepot(TestApiSearchBasics):        
+class TestApiSearchBasicsRestartingDepot(TestApiSearchBasics):
         def setUp(self):
                 self.debug_features = ["headers"]
                 TestApiSearchBasics.setUp(self)
@@ -2227,7 +2230,7 @@
                     "writ_root")
                 writ_dir = os.path.join(writable_root, "index")
                 self.dc.set_writable_root(writable_root)
-                
+
                 self.image_create(durl)
                 progresstracker = progress.NullProgressTracker()
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
@@ -2243,13 +2246,13 @@
 
                 self.pkgsend_bulk(durl, self.example_pkg10)
                 self.__wait_for_indexing(os.path.join(writ_dir, "TMP"))
-                
+
                 # Check when depot contains a package.
                 self.__corrupt_depot(writ_dir)
                 self.__wait_for_indexing(os.path.join(writ_dir, "TMP"))
                 self.assert_(not os.path.isdir(ind_dir))
                 self._run_remote_tests(api_obj)
-                
+
         def test_bug_8318(self):
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.example_pkg10)
@@ -2260,14 +2263,14 @@
                 uuids = []
                 for p in api_obj.img.gen_publishers():
                         uuids.append(p.client_uuid)
-                
+
                 self._search_op(api_obj, True, "example_path",
                     self.res_remote_path)
                 self._search_op(api_obj, True, "example_path",
                     self.res_remote_path, servers=[{"origin": durl}])
                 lfh = file(self.dc.get_logpath(), "rb")
                 found = 0
-                num_expected = 6
+                num_expected = 8
                 for line in lfh:
                         if "X-IPKG-UUID:" in line:
                                 tmp = line.split()
@@ -2290,19 +2293,20 @@
             close """
 
         res_alternate_server_local = set([
-            ('pkg:/example_pkg@1.0', 'test2/example_pkg', 'set name=fmri value=pkg://test2/example_pkg@1.0,5.11-0:')
+            ('pkg:/example_pkg@1.0', 'test2/example_pkg',
+            'set name=pkg.fmri value=pkg://test2/example_pkg@1.0,5.11-0:')
         ])
 
         def setUp(self):
-                testutils.ManyDepotTestCase.setUp(self, 3,
-                    debug_features=["headers"])
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2",
+                    "test3"], debug_features=["headers"])
 
                 self.durl1 = self.dcs[1].get_depot_url()
                 self.durl2 = self.dcs[2].get_depot_url()
                 self.durl3 = self.dcs[3].get_depot_url()
                 self.pkgsend_bulk(self.durl2, self.example_pkg10)
 
-                self.image_create(self.durl1, prefix = "test1")
+                self.image_create(self.durl1, prefix="test1")
                 self.pkg("set-publisher -O " + self.durl2 + " test2")
 
         def _check(self, proposed_answer, correct_answer):
@@ -2364,14 +2368,14 @@
                 progresstracker = progress.NullProgressTracker()
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: True, PKG_CLIENT_NAME)
-                
+
                 self._search_op(api_obj, True,
                     "this_should_not_match_any_token", set())
                 self._search_op(api_obj, True, "example_path",
                     set(), servers=[{"origin": self.durl1}])
                 self._search_op(api_obj, True, "example_path",
                     set(), servers=[{"origin": self.durl3}])
-                num_expected = { 1: 7, 2: 3, 3: 0 }
+                num_expected = { 1: 7, 2: 4, 3: 0 }
                 for d in range(1,(len(self.dcs) + 1)):
                         try:
                                 pub = api_obj.img.get_publisher(
--- a/src/tests/cli/t_pkg_api_install.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_api_install.py	Fri Oct 23 17:43:37 2009 -0500
@@ -540,6 +540,40 @@
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
                     check_illegal, api_obj.plan_install, ["/foo"], [])
 
+        def test_catalog_v0(self):
+                """Test install from a publisher's repository that only supports
+                catalog v0, and then the transition from v0 to v1."""
+
+                self.dc.stop()
+                self.dc.set_disable_ops(["catalog/1"])
+                self.dc.start()
+
+                durl = self.dc.get_depot_url()
+                self.pkgsend_bulk(durl, self.foo10)
+                self.image_create(durl)
+
+                progresstracker = progress.NullProgressTracker()
+                api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
+                    progresstracker, lambda x: False, PKG_CLIENT_NAME)
+                self.__do_install(api_obj, ["foo"])
+
+                api_obj.reset()
+                self.__do_uninstall(api_obj, ["foo"])
+
+                api_obj.reset()
+                self.__do_install(api_obj, ["pkg://test/foo"])
+
+                self.pkgsend_bulk(durl, self.bar10)
+                self.dc.stop()
+                self.dc.unset_disable_ops()
+                self.dc.start()
+
+                api_obj.reset()
+                api_obj.refresh(immediate=True)
+
+                api_obj.reset()
+                self.__do_install(api_obj, ["pkg://test/bar@1.0"])
+
 
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkg_depotd.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_depotd.py	Fri Oct 23 17:43:37 2009 -0500
@@ -30,6 +30,7 @@
 import httplib
 import os
 import pkg.depotcontroller as dc
+import pkg.fmri as fmri
 import pkg.misc as misc
 import pkg.p5i as p5i
 import pkg.server.repositoryconfig as rcfg
@@ -244,43 +245,6 @@
                 self.pkg("install bar")
                 self.pkg("verify")
 
-        def test_bug_8010(self):
-                """Publish stuff to the depot, get a full version of the
-                catalog.  Extract the Last-Modified timestamp from the
-                catalog and request an incremental update with the Last-Modified
-                timestamp.  Server should return a HTTP 304.  Fail if this
-                is not the case."""
-
-                depot_url = self.dc.get_depot_url()
-                self.pkgsend_bulk(depot_url, self.update10)
-                c, v = misc.versioned_urlopen(depot_url, "catalog", [0])
-                lm = c.info().getheader("Last-Modified", None)
-                self.assert_(lm)
-                hdr = {"If-Modified-Since": lm}
-                got304 = False
-
-                try:
-                        c, v = misc.versioned_urlopen(depot_url, "catalog",
-                            [0], headers=hdr)
-                except urllib2.HTTPError, e:
-                        # Server returns NOT_MODIFIED if catalog is up
-                        # to date
-                        if e.code == httplib.NOT_MODIFIED:
-                                got304 = True
-                        else:
-                                raise
-                # 200 OK won't raise an exception, but it's still a failure.
-                self.assert_(got304)
-
-                # Now send another package and verify that we get an
-                # incremental update
-                self.pkgsend_bulk(depot_url, self.update11)
-                c, v = misc.versioned_urlopen(depot_url, "catalog",
-                    [0], headers=hdr)
-
-                update_type = c.info().getheader("X-Catalog-Type", None)
-                self.assert_(update_type == "incremental")
-
         def test_face_root(self):
                 """Verify that files outside of the package content web root
                 cannot be accessed, and that files inside can be."""
@@ -359,6 +323,7 @@
 
                 self.__dc = dc.DepotController()
                 self.__pid = os.getpid()
+                self.__dc.set_property("publisher", "prefix", "test")
                 self.__dc.set_depotd_path(testutils.g_proto_area + \
                     "/usr/lib/pkg.depotd")
                 self.__dc.set_depotd_content_root(testutils.g_proto_area + \
@@ -382,7 +347,6 @@
                 shutil.rmtree(self.__dc.get_repodir())
                 os.remove(self.__dc.get_logpath())
 
-
         def testStartStop(self):
                 self.__dc.set_port(12000)
                 for i in range(0, 5):
@@ -391,14 +355,12 @@
                         self.__dc.stop()
                         self.assert_(not self.__dc.is_alive())
 
-
         def test_cfg_file(self):
                 cfg_file = os.path.join(self.get_test_prefix(), "cfg2")
                 fh = open(cfg_file, "w")
                 fh.close()
                 self.__dc.set_port(12000)
                 self.__dc.set_cfg_file(cfg_file)
-
                 self.__dc.start()
 
         def test_writable_root(self):
@@ -521,6 +483,50 @@
 
                 self.assert_(self.__dc.start_expected_fail())
 
+        def test_disable_ops(self):
+                """Verify that disable-ops works as expected."""
+
+                # For this disabled case, /catalog/1/ should return
+                # a NOT_FOUND error.
+                self.__dc.set_disable_ops(["catalog/1"])
+                self.__dc.set_port(12000)
+                self.__dc.start()
+                durl = self.__dc.get_depot_url()
+                try:
+                        urllib2.urlopen("%s/catalog/1/" % durl)
+                except urllib2.HTTPError, e:
+                        self.assertEqual(e.code, httplib.NOT_FOUND)
+                self.__dc.stop()
+
+                # For this disabled case, all /catalog/ operations should return
+                # a NOT_FOUND error.
+                self.__dc.set_disable_ops(["catalog"])
+                self.__dc.set_port(12000)
+                self.__dc.start()
+                durl = self.__dc.get_depot_url()
+                for ver in (0, 1):
+                        try:
+                                urllib2.urlopen("%s/catalog/%d/" % (durl, ver))
+                        except urllib2.HTTPError, e:
+                                self.assertEqual(e.code, httplib.NOT_FOUND)
+                self.__dc.stop()
+
+                # In the normal case, /catalog/1/ should return
+                # a FORBIDDEN error.
+                self.__dc.unset_disable_ops()
+                self.__dc.start()
+                durl = self.__dc.get_depot_url()
+                try:
+                        urllib2.urlopen("%s/catalog/1/" % durl)
+                except urllib2.HTTPError, e:
+                        self.assertEqual(e.code, httplib.FORBIDDEN)
+                self.__dc.stop()
+
+                # A bogus operation should prevent the depot from starting.
+                self.__dc.set_disable_ops(["no_such_op/0"])
+                self.__dc.start_expected_fail()
+                self.assertFalse(self.__dc.is_alive())
+
 
 class TestDepotOutput(testutils.SingleDepotTestCase):
         # Since these tests are output sensitive, the depots should be purged
@@ -584,6 +590,10 @@
                 # All of the tests will start depot if needed.
                 self.dc.stop()
 
+                # Prevent override of custom configuration;
+                # tests will set as needed.
+                self.dc.clear_property("publisher", "prefix")
+
                 self.tpath = tempfile.mkdtemp()
 
         def tearDown(self):
@@ -594,6 +604,8 @@
                 """Verify that a non-error response and valid HTML is returned
                 for each known BUI page in every available depot mode."""
 
+                self.dc.set_property("publisher", "prefix", "test")
+
                 # A list of tuples containing the name of the method used to set
                 # the mode, and then the method needed to unset that mode.
                 mode_methods = [
@@ -666,9 +678,9 @@
                 # Update the configuration with our sample data.
                 cfg = self.repo_cfg
                 for section in cfg:
-                        for attr in cfg[section]:
-                                rc.set_attribute(section, attr,
-                                    cfg[section][attr])
+                        for prop in cfg[section]:
+                                rc.set_property(section, prop,
+                                    cfg[section][prop])
 
                 # Save it.
                 rc.write(rcpath)
@@ -689,15 +701,15 @@
 
                 # Now verify that the parsed response has the expected data.
                 cfg = self.repo_cfg
-                for attr in cfg["publisher"]:
-                        self.assertEqual(getattr(pub, attr),
-                            cfg["publisher"][attr])
+                for prop in cfg["publisher"]:
+                        self.assertEqual(getattr(pub, prop),
+                            cfg["publisher"][prop])
 
                 repo = pub.selected_repository
-                for attr in cfg["repository"]:
-                        returned = getattr(repo, attr)
-                        expected = cfg["repository"][attr]
-                        if attr.endswith("uris") or attr == "origins":
+                for prop in cfg["repository"]:
+                        returned = getattr(repo, prop)
+                        expected = cfg["repository"][prop]
+                        if prop.endswith("uris") or prop == "origins":
                                 uris = []
                                 for u in returned:
                                         uris.append(u.uri)
@@ -725,11 +737,16 @@
                 for p in plist:
                         purl = urlparse.urljoin(durl, "p5i/0/%s" % p)
                         pub, pkglist = p5i.parse(location=purl)[0]
-                        self.assertEqual(pkglist, [p])
+
+                        # p5i files contain non-qualified FMRIs as the FMRIs
+                        # are already grouped by publisher.
+                        nq_p = fmri.PkgFmri(p).get_fmri(anarchy=True,
+                            include_scheme=False)
+                        self.assertEqual(pkglist, [nq_p])
 
                 # Try again, but only using package stems.
                 for p in plist:
-                        stem = p.split("@", 1)[0]
+                        stem = fmri.PkgFmri(p).pkg_name
                         purl = urlparse.urljoin(durl, "p5i/0/%s" % stem)
                         pub, pkglist = p5i.parse(location=purl)[0]
                         self.assertEqual(pkglist, [stem])
--- a/src/tests/cli/t_pkg_history.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_history.py	Fri Oct 23 17:43:37 2009 -0500
@@ -53,7 +53,7 @@
             close """
 
         def setUp(self):
-                testutils.ManyDepotTestCase.setUp(self, 2)
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2"])
 
                 durl1 = self.dcs[1].get_depot_url()
                 self.pkgsend_bulk(durl1, self.foo1)
@@ -63,11 +63,12 @@
                 self.dcs[2].stop()
                 d1dir = self.dcs[1].get_repodir()
                 d2dir = self.dcs[2].get_repodir()
-                shutil.rmtree(d2dir)
-                shutil.copytree(d1dir, d2dir)
+                self.copy_repository(d1dir, "test1", d2dir, "test2")
+                self.dcs[2].set_rebuild()
                 self.dcs[2].start()
+                self.dcs[2].set_norebuild()
 
-                self.image_create(durl1, prefix = "test1")
+                self.image_create(durl1, prefix="test1")
 
         def tearDown(self):
                 testutils.ManyDepotTestCase.tearDown(self)
--- a/src/tests/cli/t_pkg_image_create.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_image_create.py	Fri Oct 23 17:43:37 2009 -0500
@@ -41,7 +41,7 @@
         persistent_depot = True
 
         def setUp(self):
-                testutils.ManyDepotTestCase.setUp(self, 2)
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2"])
 
                 self.durl1 = self.dcs[1].get_depot_url()
                 self.durl2 = self.dcs[2].get_depot_url()
@@ -49,7 +49,7 @@
         def test_basic(self):
                 """ Create an image, verify it. """
 
-                self.image_create(self.durl1)
+                self.image_create(self.durl1, prefix="test1")
                 self.pkg("verify")
 
         def test_image_create_bad_opts(self):
@@ -77,6 +77,24 @@
                     fmri.get_link_path()), "w")
                 fi.close()
 
+        @staticmethod
+        def __transform_v1_v0(v1_cat, v0_dest):
+                name = os.path.join(v0_dest, "attrs")
+                f = open(name, "wb")
+                f.write("S "
+                    "Last-Modified: %s\n" % v1_cat.last_modified.isoformat())
+                f.write("S prefix: CRSV\n")
+                f.write("S npkgs: %s\n" % v1_cat.package_version_count)
+                f.close()
+
+                name = os.path.join(v0_dest, "catalog")
+                f = open(name, "wb")
+                # Now write each FMRI in the catalog in the v0 format:
+                # V pkg:/[email protected],5.11-0.86:20080426T173208Z
+                for pub, stem, ver in v1_cat.tuples():
+                        f.write("V pkg:/%s@%s\n" % (stem, ver))
+                f.close()
+
         def test_766(self):
                 """Bug 766: image-create without publisher prefix specified."""
 
@@ -87,7 +105,7 @@
                 self.pkgsend_bulk(self.durl1, pkgsend_data)
 
                 self.assertRaises(testutils.UnexpectedExitCodeException,
-                    self.image_create, self.durl1, "")
+                    self.image_create, self.durl1, prefix="")
 
         def test_3588(self):
                 """Ensure that image creation works as expected when an image
@@ -97,16 +115,16 @@
                 #
                 # Bug 3588: Make sure we can't create an image where one
                 # already exists
-                self.pkg("image-create -p mydepot=%s %s/3588_image" % (
+                self.pkg("image-create -p test1=%s %s/3588_image" % (
                     self.durl1, self.get_img_path()))
-                self.pkg("image-create -p mydepot=%s %s/3588_image" % (
+                self.pkg("image-create -p test1=%s %s/3588_image" % (
                     self.durl1, self.get_img_path()), exit=1)
 
                 # Make sure we can create an image where one
                 # already exists with the -f (force) flag
-                self.pkg("image-create -p mydepot=%s %s/3588_image_1" % (
+                self.pkg("image-create -p test1=%s %s/3588_image_1" % (
                     self.durl1, self.get_img_path()))
-                self.pkg("image-create -f -p mydepot=%s %s/3588_image_1" %
+                self.pkg("image-create -f -p test1=%s %s/3588_image_1" %
                          (self.durl1, self.get_img_path()))
 
                 # Bug 3588: Make sure we can't create an image where a
@@ -114,9 +132,9 @@
                 p = os.path.join(self.get_img_path(), "3588_2_image")
                 os.mkdir(p)
                 self.cmdline_run("touch %s/%s" % (p, "somefile"))
-                self.pkg("image-create -p mydepot=%s %s" % (self.durl1, p),
+                self.pkg("image-create -p test1=%s %s" % (self.durl1, p),
                     exit=1)
-                self.pkg("image-create -f -p mydepot=%s %s" % (self.durl1, p))
+                self.pkg("image-create -f -p test1=%s %s" % (self.durl1, p))
 
         def test_4_options(self):
                 """Verify that all of the options for specifying publisher
@@ -124,7 +142,7 @@
 
                 img_path = os.path.join(self.get_test_prefix(), "test_4_img")
                 for opt in ("-a", "-p", "--publisher"):
-                        self.pkg("image-create %s mydepot=%s %s" % (opt,
+                        self.pkg("image-create %s test1=%s %s" % (opt,
                             self.durl1, img_path))
                         shutil.rmtree(img_path)
 
@@ -149,7 +167,7 @@
                 # specified image root if the specified root doesn't already
                 # exist.
                 os.chdir(self.get_test_prefix())
-                self.pkg("image-create -p mydepot=%s %s" % (self.durl1,
+                self.pkg("image-create -p test1=%s %s" % (self.durl1,
                     img_path))
                 os.chdir(pwd)
                 self.assertFalse(os.path.exists(os.path.join(abs_img_path,
@@ -160,7 +178,7 @@
                 # specified image root if the specified root already exists.
                 os.chdir(self.get_test_prefix())
                 os.mkdir(img_path)
-                self.pkg("image-create -p mydepot=%s %s" % (self.durl1,
+                self.pkg("image-create -p test1=%s %s" % (self.durl1,
                     img_path))
                 os.chdir(pwd)
                 self.assertFalse(os.path.exists(os.path.join(abs_img_path,
@@ -179,14 +197,14 @@
 
                 # First, check to be certain that an image-create --no-refresh
                 # will succeed.
-                self.image_create(self.durl2, prefix="norefresh",
+                self.image_create(self.durl2, prefix="test1",
                     additional_args="--no-refresh")
                 self.pkg("list --no-refresh -a | grep baz", exit=1)
 
                 # Finally, verify that using set-publisher will cause a refresh
                 # which in turn should cause 'baz' to be listed *if* the origin
                 # has changed (setting it to the same value again won't work).
-                self.pkg("set-publisher -O %s norefresh" % self.durl1)
+                self.pkg("set-publisher -O %s test1" % self.durl1)
                 self.pkg("list --no-refresh -a | grep baz")
 
         def test_8_image_upgrade(self):
@@ -199,7 +217,7 @@
                 self.pkgsend_bulk(self.durl2, "open [email protected]\nclose")
 
                 # First, create a new image.
-                self.image_create(self.durl1)
+                self.image_create(self.durl1, prefix="test1")
 
                 # Add the second repository.
                 self.pkg("set-publisher -O %s test2" % self.durl2)
@@ -211,7 +229,7 @@
                 # This is necessary to ensure that packages installed from a
                 # previously preferred publisher also get the correct status.
                 self.pkg("install corge")
-                self.pkg("set-publisher -P test")
+                self.pkg("set-publisher -P test1")
 
                 # Next, disable the second repository's publisher.
                 self.pkg("set-publisher -d test2")
@@ -220,16 +238,15 @@
                 img_root = os.path.join(self.get_img_path(), "var", "pkg")
                 cat_path = os.path.join(img_root, "catalog")
                 pub_path = os.path.join(img_root, "publisher")
-                v1_cat_path = os.path.join(pub_path, "test", "catalog")
-                v0_cat_path = os.path.join(cat_path, "test")
+
+                v1_cat = pkg.catalog.Catalog(meta_root=os.path.join(pub_path,
+                    "test1", "catalog"), read_only=True)
+                v0_cat_path = os.path.join(cat_path, "test1")
 
-                # For conversion, the v0 catalogs need to be moved to the
-                # v0 location.
+                # For conversion, the v0 catalogs need to be generated in
+                # the v0 location.
                 os.makedirs(v0_cat_path)
-                for fname in ("catalog", "attrs"):
-                        src = os.path.join(v1_cat_path, fname)
-                        dest = os.path.join(v0_cat_path, fname)
-                        pkg.portable.rename(src, dest)
+                self.__transform_v1_v0(v1_cat, v0_cat_path)
 
                 # The existing installed state has to be converted to v0.
                 state_dir = os.path.join(img_root, "state")
@@ -255,12 +272,12 @@
                 # an unprivileged user.  Each must be done with and without
                 # the publisher prefix to test that these are stripped and
                 # read properly (because of the publisher preferred prefix).
-                self.pkg("info pkg://test/quux corge", su_wrap=True)
+                self.pkg("info pkg://test1/quux corge", su_wrap=True)
                 self.pkg("info pkg://test2/corge quux", su_wrap=True)
 
                 # Next, verify that the new client can upgrade v0 images to
                 # v1 images.
-                self.pkg("info quux pkg://test/quux pkg://test2/corge")
+                self.pkg("info quux pkg://test1/quux pkg://test2/corge")
 
                 # Finally, verify that the old structures and state information
                 # are gone.
--- a/src/tests/cli/t_pkg_image_update.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_image_update.py	Fri Oct 23 17:43:37 2009 -0500
@@ -99,21 +99,30 @@
             close """
 
         def setUp(self):
-                testutils.ManyDepotTestCase.setUp(self, 3)
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2",
+                    "test3", "test4", "test5"])
                 durl1 = self.dcs[1].get_depot_url()
                 durl2 = self.dcs[2].get_depot_url()
+                durl4 = self.dcs[4].get_depot_url()
+                durl5 = self.dcs[5].get_depot_url()
                 self.pkgsend_bulk(durl1, self.foo10 + self.foo11 + \
                     self.baz11 + self.qux10 + self.qux11 + self.quux10 + \
                     self.quux11 + self.corge11)
                 self.pkgsend_bulk(durl2, self.foo10 + self.bar10 + \
                     self.bar11 + self.baz10 + self.qux10 + self.qux11 + \
                     self.quux10 + self.quux11 + self.corge10)
+                self.pkgsend_bulk(durl4, self.foo10 + self.bar10 + \
+                    self.bar11 + self.baz10 + self.qux10 + self.qux11 + \
+                    self.quux10 + self.quux11 + self.corge10)
+                self.pkgsend_bulk(durl5, self.foo10 + self.bar10 + \
+                    self.bar11 + self.baz10 + self.qux10 + self.qux11 + \
+                    self.quux10 + self.quux11 + self.corge10)
 
         def test_image_update_bad_opts(self):
                 """Test image-update with bad options."""
 
                 durl1 = self.dcs[1].get_depot_url()
-                self.image_create(durl1)
+                self.image_create(durl1, prefix="test1")
 
                 self.pkg("image-update -@", exit=2)
                 self.pkg("image-update -vq", exit=2)
@@ -127,7 +136,9 @@
                 durl1 = self.dcs[1].get_depot_url()
                 durl2 = self.dcs[2].get_depot_url()
                 durl3 = self.dcs[3].get_depot_url()
-                self.image_create(durl1)
+                durl4 = self.dcs[4].get_depot_url()
+                durl5 = self.dcs[5].get_depot_url()
+                self.image_create(durl1, prefix="test1")
 
                 # Install a package from the preferred publisher.
                 self.pkg("install [email protected]")
@@ -143,32 +154,23 @@
                 self.pkg("set-publisher -O %s test2" % durl3)
                 self.pkg("image-update -nv")
 
-                # Add two publishers using the removed publisher's repository,
+                # Add two publishers with the same packages as a removed one;
                 # an image-update should be possible despite the conflict (as
                 # the newer versions will simply be ignored).
-                self.pkg("set-publisher -O %s test3" % durl2)
-                self.pkg("set-publisher -O %s test4" % durl2)
-                self.pkg("image-update -nv")
-                self.pkg("unset-publisher test4")
-                self.pkg("unset-publisher test3")
-
-                # With the publisher of an installed package unknown, add a new
-                # publisher using the repository the package was originally
-                # installed from.  An image-update should still be possible (see
-                # bug 6856).
-                self.pkg("set-publisher -O %s test3" % durl2)
+                self.pkg("unset-publisher test2")
+                self.pkg("set-publisher -O %s test4" % durl4)
+                self.pkg("set-publisher -O %s test5" % durl5)
                 self.pkg("image-update -nv")
 
-                # Remove the publisher of an installed package, then add the
-                # publisher back, but with an empty catalog.  Then add a new
-                # publisher using the repository the package was originally
-                # installed from.  An image-update should still be possible (see
-                # bug 6856).
-                self.pkg("unset-publisher test2")
-                self.pkg("set-publisher -O %s test2" % durl3)
-                self.pkg("set-publisher -O %s test3" % durl2)
+                # Remove one of the conflicting publishers.  An image-update
+                # should still be possible now that the conflict is gone, even
+                # though the original publisher is unknown (see bug 6856).
+                self.pkg("unset-publisher test4")
                 self.pkg("image-update -nv")
 
+                # Remove the remaining test publisher.
+                self.pkg("unset-publisher test5")
+
         def test_02_update_multi_publisher(self):
                 """Verify that image-updates work as expected when different
                 publishers offer the same package."""
--- a/src/tests/cli/t_pkg_install.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_install.py	Fri Oct 23 17:43:37 2009 -0500
@@ -2090,14 +2090,17 @@
             close"""
 
         def setUp(self):
-                """ Start two depots.
+                """ Start four depots.
                     depot 1 gets foo and moo, depot 2 gets foo and bar,
-                    depot 3 is empty
+                    depot 3 is empty, depot 4 gets [email protected]
                     depot1 is mapped to publisher test1 (preferred)
                     depot2 is mapped to publisher test2
-                    depot3 is not mapped to a publisher"""
-
-                testutils.ManyDepotTestCase.setUp(self, 3)
+                    depot3 is not mapped during setUp
+                    depot4 is not mapped during setUp"""
+
+                # Two depots are intentionally started for test2.
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2",
+                    "test3", "test2"])
 
                 durl1 = self.dcs[1].get_depot_url()
                 self.pkgsend_bulk(durl1, self.foo10 + self.moo10 + \
@@ -2110,6 +2113,9 @@
                 self.pkgsend_bulk(durl2, self.foo10 + self.bar10 + \
                     self.upgrade_p11 + self.upgrade_np10 + self.corge10)
 
+                durl4 = self.dcs[4].get_depot_url()
+                self.pkgsend_bulk(durl4, self.upgrade_np11)
+
                 # Create image and hence primary publisher
                 self.image_create(durl1, prefix="test1")
 
@@ -2225,10 +2231,10 @@
                 # Set test1 to point to an unreachable URI.
                 self.pkg("set-publisher --no-refresh -O http://test.invalid7 test1")
 
-                # Set test2 to point to test1's repository so that the
-                # image-update can happen (see bug 8613).
-                durl1 = self.dcs[1].get_depot_url()
-                self.pkg("set-publisher -O %s test2" % durl1)
+                # Set test2 so that upgrade-np has a new version available
+                # even though test1's repository is not accessible.
+                durl4 = self.dcs[4].get_depot_url()
+                self.pkg("set-publisher -O %s test2" % durl4)
 
                 # Verify image-update works even though test1 is unreachable
                 # since [email protected] is available from test2.
@@ -2236,6 +2242,7 @@
 
                 # Now reset everything for the next test.
                 self.pkg("uninstall upgrade-np")
+                durl1 = self.dcs[1].get_depot_url()
                 self.pkg("set-publisher --no-refresh -O %s test1" % durl1)
                 self.pkg("set-publisher -O %s test2" % durl2)
 
@@ -2272,6 +2279,7 @@
                 self.pkg("set-publisher -O %s test1" % \
                     self.dcs[3].get_depot_url())
                 self.pkg("install [email protected]")
+                self.pkg("info [email protected]")
                 self.pkg("unset-publisher test1")
 
                 # Add a new publisher, using the installed package publisher's
--- a/src/tests/cli/t_pkg_intent.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_intent.py	Fri Oct 23 17:43:37 2009 -0500
@@ -78,7 +78,7 @@
             open [email protected],5.11-0
             add depend type=require fmri=pkg:/[email protected]
             add dir mode=0755 owner=root group=bin path=/bin
-            add file /tmp/cat mode=0555 owner=root group=bin path=/bin/cat 
+            add file /tmp/cat mode=0555 owner=root group=bin path=/bin/cat
             close """
 
         baz10 = """
@@ -176,22 +176,31 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: False, PKG_CLIENT_NAME)
 
-                api_obj.info(plist, False, frozenset([api.PackageInfo.IDENTITY,
-                    api.PackageInfo.STATE, api.PackageInfo.PREF_PUBLISHER]))
+                info_needed = api.PackageInfo.ALL_OPTIONS - \
+                    frozenset([api.PackageInfo.LICENSES,
+                    api.PackageInfo.SIZE]) - \
+                    (api.PackageInfo.ACTION_OPTIONS - \
+                    frozenset([api.PackageInfo.DEPENDENCIES]))
+
+                api_obj.info(plist, False, info_needed)
 
                 entries = self.get_intent_entries()
-                self.assert_(entries == [])
-                
+                self.assertEqual(entries, [])
+
                 api_obj.info(plist, False,
-                    frozenset([api.PackageInfo.DEPENDENCIES]))
+                    info_needed | api.PackageInfo.ACTION_OPTIONS)
 
                 entries = self.get_intent_entries()
                 # Verify that evaluation and processing entries are present
-                # for info.
+                # for info.  This will only happen if the client actually
+                # has to contact the repository to get information not found
+                # in the catalog.
+                target = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "info",
                     "reason": "info",
-                    "initial_target": plist[0],
+                    "initial_target": target,
                 }))
 
         def test_1_install_uninstall(self):
@@ -214,37 +223,39 @@
                 self.__do_uninstall(api_obj, ["foo"])
 
                 entries = self.get_intent_entries()
-                # Verify that evaluation and processing entries are present
-                # for install.
+                # Verify that entries are present for install.
+                target = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "evaluate",
-                    "initial_target": plist[0],
+                    "reason": "info",
+                    "initial_target": target,
                 }))
 
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "process",
-                    "initial_target": plist[0],
+                    "reason": "info",
+                    "initial_target": target,
                 }))
 
                 # Verify that evaluation entries are not present for uninstall.
                 # Image operations that are for evaluation only and do not
                 # require retrieving manifest information will not send any
                 # intent information for efficiency.
+                target_ver = str(fmri.PkgFmri(target).version)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "prior_version": str(fmri.PkgFmri(plist[0]).version),
+                    "prior_version": target_ver,
                     "reason": "evaluate",
-                    "initial_target": plist[0],
+                    "initial_target": target,
                 }) == False)
 
                 # Verify that processing entries are present for uninstall.
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "prior_version": str(fmri.PkgFmri(plist[0]).version),
+                    "prior_version": target_ver,
                     "reason": "process",
-                    "initial_target": plist[0],
+                    "initial_target": target,
                 }))
 
         def test_2_upgrade(self):
@@ -270,32 +281,34 @@
                 self.__do_uninstall(api_obj, ["foo"])
 
                 entries = self.get_intent_entries()
-                # Verify that evaluation and processing entries are present
-                # for install.
+                # Verify entries are present for install.
+                target0 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "evaluate",
-                    "initial_target": plist[0],
+                    "reason": "info",
+                    "initial_target": target0,
+                }))
+
+                target1 = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True,
+                    include_scheme=False)
+                self.assert_(self.intent_entry_exists(entries, {
+                    "operation": "install",
+                    "reason": "info",
+                    "initial_target": target1,
                 }))
 
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "evaluate",
-                    "prior_version": str(fmri.PkgFmri(plist[0]).version),
-                    "initial_target": plist[1],
+                    "reason": "info",
+                    "initial_target": target0,
                 }))
 
+                version0 = str(fmri.PkgFmri(target0).version)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "process",
-                    "initial_target": plist[0],
-                }))
-
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "install",
-                    "reason": "process",
-                    "prior_version": str(fmri.PkgFmri(plist[0]).version),
-                    "initial_target": plist[1],
+                    "reason": "info",
+                    "initial_target": target1,
                 }))
 
                 # Verify that evaluation entries are not present for uninstall.
@@ -306,14 +319,15 @@
                     "operation": "uninstall",
                     "reason": "evaluate",
                     "prior_version": str(fmri.PkgFmri(plist[1]).version),
-                    "initial_target": plist[1],
+                    "initial_target": target1,
                 }) == False)
 
+                version1 = str(fmri.PkgFmri(target1).version)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
                     "reason": "process",
-                    "prior_version": str(fmri.PkgFmri(plist[1]).version),
-                    "initial_target": plist[1],
+                    "prior_version": version1,
+                    "initial_target": target1,
                 }))
 
         def test_3_dependencies(self):
@@ -332,18 +346,20 @@
 
                 # Only testing for process; no need to re-test for evaluate.
                 entries = self.get_intent_entries()
+                target1 = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "process",
-                    "initial_target": plist[1],
+                    "reason": "info",
+                    "initial_target": target1,
                 }))
 
+                target0 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "needed_by": plist[1],
-                    "reason": "process",
-                    "initial_target": plist[1],
-                    "target": plist[0],
+                    "reason": "info",
+                    "initial_target": target0,
                 }))
 
         def test_4_image_upgrade(self):
@@ -373,19 +389,23 @@
                 # Only testing for process; no need to re-test for evaluate.
                 entries = self.get_intent_entries()
                 # Verify that foo10 was installed when upgrading to foo12.
+                version0 = str(fmri.PkgFmri(plist[0]).version)
+                target3 = fmri.PkgFmri(plist[3]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "image-update",
-                    "reason": "process",
-                    "prior_version": str(fmri.PkgFmri(plist[0]).version),
-                    "initial_target": plist[3],
+                    "reason": "info",
+                    "initial_target": target3,
                 }))
 
+                version2 = str(fmri.PkgFmri(plist[2]).version)
+                target4 = fmri.PkgFmri(plist[4]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 # Verify that bar10 was installed when upgrading to bar11.
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "image-update",
-                    "reason": "process",
-                    "prior_version": str(fmri.PkgFmri(plist[2]).version),
-                    "initial_target": plist[4],
+                    "reason": "info",
+                    "initial_target": target4,
                 }))
 
         def test_5_recursive_uninstall(self):
@@ -402,24 +422,30 @@
 
                 # Only testing for process; no need to re-test for evaluate.
                 self.__do_uninstall(api_obj, ["foo"], True)
-                                       
+
                 entries = self.get_intent_entries()
                 # Verify that foo10 was uninstalled.
+                target0 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
+                    include_scheme=False)
+                version0 = str(fmri.PkgFmri(target0).version)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
                     "reason": "process",
-                    "initial_target": plist[0],
-                    "prior_version": str(fmri.PkgFmri(plist[0]).version),
+                    "initial_target": target0,
+                    "prior_version": version0,
                 }))
 
                 # Verify that bar10 was uninstalled because of foo10.
+                target2 = fmri.PkgFmri(plist[2]).get_fmri(anarchy=True,
+                    include_scheme=False)
+                version2 = str(fmri.PkgFmri(target2).version)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "needed_by": plist[0],
+                    "needed_by": target0,
                     "reason": "process",
-                    "initial_target": plist[0],
-                    "target": plist[2],
-                    "prior_version": str(fmri.PkgFmri(plist[2]).version),
+                    "initial_target": target0,
+                    "target": target2,
+                    "prior_version": version2,
                 }))
 
         def test_6_deep_dependencies(self):
@@ -444,31 +470,31 @@
                 # Verify the install entries.
                 #
 
-                # Verify baz is the initial target.
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "install",
-                    "reason": "process",
-                    "initial_target": plist[2],
-                }))
-
-                # Verify baz is the initial target, bar is needed_by baz, and
-                # bar is the target.
+                # Verify baz is logged.
+                target2 = fmri.PkgFmri(plist[2]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "needed_by": plist[2],
-                    "reason": "process",
-                    "initial_target": plist[2],
-                    "target": plist[1],
+                    "reason": "info",
+                    "initial_target": target2,
                 }))
 
-                # Verify baz is the initial target, foo is needed_by bar, and
-                # foo is the target.
+                # Verify bar is logged.
+                target1 = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True,
+                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "needed_by": plist[1],
-                    "reason": "process",
-                    "initial_target": plist[2],
-                    "target": plist[0],
+                    "reason": "info",
+                    "initial_target": target1,
+                }))
+
+                # Verify foo is logged.
+                target0 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
+                    include_scheme=False)
+                self.assert_(self.intent_entry_exists(entries, {
+                    "operation": "install",
+                    "reason": "info",
+                    "initial_target": target0,
                 }))
 
                 #
@@ -476,33 +502,36 @@
                 #
 
                 # Verify foo is the initial target.
+                version0 = str(fmri.PkgFmri(target0).version)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
                     "reason": "process",
-                    "initial_target": plist[0],
-                    "prior_version": str(fmri.PkgFmri(plist[0]).version),
+                    "initial_target": target0,
+                    "prior_version": version0,
                 }))
 
                 # Verify foo is the initial target, bar is needed_by foo, and
                 # foo is the target.
+                version1 = str(fmri.PkgFmri(target1).version)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "needed_by": plist[0],
+                    "needed_by": target0,
                     "reason": "process",
-                    "initial_target": plist[0],
-                    "target": plist[1],
-                    "prior_version": str(fmri.PkgFmri(plist[1]).version),
+                    "initial_target": target0,
+                    "target": target1,
+                    "prior_version": version1,
                 }))
 
                 # Verify foo is the initial target, baz is needed_by bar, and
                 # baz is the target.
+                version2 = str(fmri.PkgFmri(target2).version)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "needed_by": plist[1],
+                    "needed_by": target1,
                     "reason": "process",
-                    "initial_target": plist[0],
-                    "target": plist[2],
-                    "prior_version": str(fmri.PkgFmri(plist[2]).version),
+                    "initial_target": target0,
+                    "target": target2,
+                    "prior_version": version2,
                 }))
 
 
--- a/src/tests/cli/t_pkg_list.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_list.py	Fri Oct 23 17:43:37 2009 -0500
@@ -29,11 +29,12 @@
 if __name__ == "__main__":
         testutils.setup_environment("../../../proto")
 
+import calendar
 import difflib
 import os
-import pkg.catalog as catalog
 import re
 import shutil
+import simplejson as json
 import unittest
 
 class TestPkgList(testutils.ManyDepotTestCase):
@@ -69,7 +70,8 @@
             close """
 
         def setUp(self):
-                testutils.ManyDepotTestCase.setUp(self, 3)
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2",
+                    "test3"])
 
                 durl1 = self.dcs[1].get_depot_url()
                 self.pkgsend_bulk(durl1, self.foo1 + self.foo10 + self.foo11 + \
@@ -84,15 +86,19 @@
                 self.dcs[2].stop()
                 d1dir = self.dcs[1].get_repodir()
                 d2dir = self.dcs[2].get_repodir()
-                shutil.rmtree(d2dir)
-                shutil.copytree(d1dir, d2dir)
+                self.copy_repository(d1dir, "test1", d2dir, "test2")
+
+                # The new repository won't have a catalog, so set the depot
+                # server to rebuild it.
+                self.dcs[2].set_rebuild()
                 self.dcs[2].start()
+                self.dcs[2].set_norebuild()
 
                 # The third repository should remain empty and not be
                 # published to.
 
-                self.image_create(durl1, prefix = "test1")
-
+                # Next, create the image and configure publishers.
+                self.image_create(durl1, prefix="test1")
                 self.pkg("set-publisher -O " + durl2 + " test2")
 
         def reduceSpaces(self, string):
@@ -240,7 +246,6 @@
                 publisher, and verify that list still shows the package
                 as installed."""
 
-                durl1 = self.dcs[1].get_depot_url()
                 durl2 = self.dcs[2].get_depot_url()
                 durl3 = self.dcs[3].get_depot_url()
 
@@ -275,15 +280,15 @@
 
                 # With the publisher of an installed package unknown, add a new
                 # publisher using the repository the package was originally
-                # installed from.  The should be shown as known for test1,
-                # installed for test2, and known for test3.
+                # installed from.  The pkg should be shown as known for test1,
+                # installed for test2, and test3 shouldn't be listed since the
+                # packages in the specified repository are for publisher test2.
                 self.pkg("unset-publisher test2")
                 self.pkg("set-publisher -O %s test3" % durl2)
                 self.pkg("list -aH [email protected]")
                 expected = \
                     "foo 1.0-0 known u---\n" + \
-                    "foo (test2) 1.0-0 installed u---\n" + \
-                    "foo (test3) 1.0-0 known u---\n"
+                    "foo (test2) 1.0-0 installed u---\n"
                 output = self.reduceSpaces(self.output)
                 self.assertEqualDiff(expected, output)
                 self.pkg("unset-publisher test3")
--- a/src/tests/cli/t_pkg_publisher.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_publisher.py	Fri Oct 23 17:43:37 2009 -0500
@@ -150,12 +150,14 @@
                 self.pkg("set-publisher --no-refresh -k %s test1" % key_path)
                 os.close(key_fh)
                 os.unlink(key_path)
-                self.pkg("set-publisher --no-refresh -k %s test2" % key_path, exit=1)
+                self.pkg("set-publisher --no-refresh -k %s test2" % key_path,
+                    exit=1)
 
                 self.pkg("set-publisher --no-refresh -c %s test1" % cert_path)
                 os.close(cert_fh)
                 os.unlink(cert_path)
-                self.pkg("set-publisher --no-refresh -c %s test2" % cert_path, exit=1)
+                self.pkg("set-publisher --no-refresh -c %s test2" % cert_path,
+                    exit=1)
 
                 self.pkg("publisher test1")
                 self.pkg("publisher test3", exit=1)
@@ -171,7 +173,7 @@
         def test_publisher_validation(self):
                 """Verify that we catch poorly formed auth prefixes and URL"""
                 durl = self.dc.get_depot_url()
-                self.image_create(durl)
+                self.image_create(durl, prefix="test")
 
                 self.pkg("set-publisher -O http://%s1 test1" % self.bogus_url,
                     exit=1)
@@ -191,38 +193,36 @@
         def test_mirror(self):
                 """Test set-mirror and unset-mirror."""
                 durl = self.dc.get_depot_url()
-                pfx = "mtest"
-                self.image_create(durl, prefix = pfx)
+                self.image_create(durl, prefix="test")
 
-                self.pkg("set-publisher -m http://%s1 mtest" % self.bogus_url)
-                self.pkg("set-publisher -m http://%s2 mtest" %
+                self.pkg("set-publisher -m http://%s1 test" % self.bogus_url)
+                self.pkg("set-publisher -m http://%s2 test" %
                     self.bogus_url)
                 self.pkg("set-publisher -m http://%s5" % self.bogus_url, exit=2)
-                self.pkg("set-publisher -m mtest", exit=2)
-                self.pkg("set-publisher -m http://%s1 mtest" % self.bogus_url,
+                self.pkg("set-publisher -m test", exit=2)
+                self.pkg("set-publisher -m http://%s1 test" % self.bogus_url,
                     exit=1)
-                self.pkg("set-publisher -m http://%s5 test" % self.bogus_url,
+                self.pkg("set-publisher -m http://%s5 test1" % self.bogus_url,
                     exit=1)
-                self.pkg("set-publisher -m %s7 mtest" % self.bogus_url, exit=1)
+                self.pkg("set-publisher -m %s7 test" % self.bogus_url, exit=1)
 
-                self.pkg("set-publisher -M http://%s1 mtest" % self.bogus_url)
-                self.pkg("set-publisher -M http://%s2 mtest" %
+                self.pkg("set-publisher -M http://%s1 test" % self.bogus_url)
+                self.pkg("set-publisher -M http://%s2 test" %
                     self.bogus_url)
-                self.pkg("set-publisher -M mtest http://%s2 http://%s4" %
+                self.pkg("set-publisher -M test1 http://%s2 http://%s4" %
                     (self.bogus_url, self.bogus_url), exit=2)
                 self.pkg("set-publisher -M http://%s5" % self.bogus_url, exit=2)
-                self.pkg("set-publisher -M mtest", exit=2)
-                self.pkg("set-publisher -M http://%s5 test" % self.bogus_url,
+                self.pkg("set-publisher -M test", exit=2)
+                self.pkg("set-publisher -M http://%s5 test1" % self.bogus_url,
                     exit=1)
-                self.pkg("set-publisher -M http://%s6 mtest" % self.bogus_url,
+                self.pkg("set-publisher -M http://%s6 test" % self.bogus_url,
                     exit=1)
-                self.pkg("set-publisher -M %s7 mtest" % self.bogus_url, exit=1)
+                self.pkg("set-publisher -M %s7 test" % self.bogus_url, exit=1)
 
         def test_missing_perms(self):
                 """Bug 2393"""
                 durl = self.dc.get_depot_url()
-                pfx = "mtest"
-                self.image_create(durl, prefix=pfx)
+                self.image_create(durl, prefix="test")
 
                 self.pkg("set-publisher --no-refresh -O http://%s1 test1" %
                     self.bogus_url, su_wrap=True, exit=1)
@@ -234,20 +234,21 @@
                 self.pkg("unset-publisher foo", su_wrap=True, exit=1)
                 self.pkg("unset-publisher foo")
 
-                self.pkg("set-publisher -m http://%s1 mtest" % self.bogus_url, \
+                self.pkg("set-publisher -m http://%s1 test" % self.bogus_url, \
                     su_wrap=True, exit=1)
-                self.pkg("set-publisher -m http://%s2 mtest" %
+                self.pkg("set-publisher -m http://%s2 test" %
                     self.bogus_url)
 
-                self.pkg("set-publisher -M http://%s2 mtest" %
+                self.pkg("set-publisher -M http://%s2 test" %
                     self.bogus_url, su_wrap=True, exit=1)
-                self.pkg("set-publisher -M http://%s2 mtest" %
+                self.pkg("set-publisher -M http://%s2 test" %
                     self.bogus_url)
 
                 # Now change the first publisher to a https URL so that
                 # certificate failure cases can be tested.
                 key_fh, key_path = tempfile.mkstemp(dir=self.get_test_prefix())
-                cert_fh, cert_path = tempfile.mkstemp(dir=self.get_test_prefix())
+                cert_fh, cert_path = tempfile.mkstemp(
+                    dir=self.get_test_prefix())
 
                 self.pkg("set-publisher --no-refresh -O https://%s1 test1" %
                     self.bogus_url)
@@ -269,37 +270,36 @@
         def test_mirror_longopt(self):
                 """Test set-mirror and unset-mirror."""
                 durl = self.dc.get_depot_url()
-                pfx = "mtest"
-                self.image_create(durl, prefix = pfx)
+                self.image_create(durl, prefix="test")
 
-                self.pkg("set-publisher --add-mirror=http://%s1 mtest" %
+                self.pkg("set-publisher --add-mirror=http://%s1 test" %
                     self.bogus_url)
-                self.pkg("set-publisher --add-mirror=http://%s2 mtest" %
+                self.pkg("set-publisher --add-mirror=http://%s2 test" %
                     self.bogus_url)
                 self.pkg("set-publisher --add-mirror=http://%s5" %
                     self.bogus_url, exit=2)
-                self.pkg("set-publisher --add-mirror=mtest", exit=2)
-                self.pkg("set-publisher --add-mirror=http://%s1 mtest" %
+                self.pkg("set-publisher --add-mirror=test", exit=2)
+                self.pkg("set-publisher --add-mirror=http://%s1 test" %
                     self.bogus_url, exit=1)
-                self.pkg("set-publisher --add-mirror=http://%s5 test" %
+                self.pkg("set-publisher --add-mirror=http://%s5 test1" %
                     self.bogus_url, exit=1)
-                self.pkg("set-publisher --add-mirror=%s7 mtest" %
+                self.pkg("set-publisher --add-mirror=%s7 test" %
                     self.bogus_url, exit=1)
 
-                self.pkg("set-publisher --remove-mirror=http://%s1 mtest" %
+                self.pkg("set-publisher --remove-mirror=http://%s1 test" %
                     self.bogus_url)
-                self.pkg("set-publisher --remove-mirror=http://%s2 mtest" %
+                self.pkg("set-publisher --remove-mirror=http://%s2 test" %
                     self.bogus_url)
-                self.pkg("set-publisher --remove-mirror=mtest http://%s2 http://%s4" %
+                self.pkg("set-publisher --remove-mirror=test http://%s2 http://%s4" %
                     (self.bogus_url, self.bogus_url), exit=2)
                 self.pkg("set-publisher --remove-mirror=http://%s5" %
                     self.bogus_url, exit=2)
-                self.pkg("set-publisher --remove-mirror=mtest", exit=2)
-                self.pkg("set-publisher --remove-mirror=http://%s5 test" %
+                self.pkg("set-publisher --remove-mirror=test", exit=2)
+                self.pkg("set-publisher --remove-mirror=http://%s5 test1" %
                     self.bogus_url, exit=1)
-                self.pkg("set-publisher --remove-mirror=http://%s6 mtest" %
+                self.pkg("set-publisher --remove-mirror=http://%s6 test" %
                     self.bogus_url, exit=1)
-                self.pkg("set-publisher --remove-mirror=%s7 mtest" %
+                self.pkg("set-publisher --remove-mirror=%s7 test" %
                     self.bogus_url, exit=1)
 
 
@@ -316,7 +316,7 @@
             close """
 
         def setUp(self):
-                testutils.ManyDepotTestCase.setUp(self, 2)
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2"])
 
                 durl1 = self.dcs[1].get_depot_url()
                 self.pkgsend_bulk(durl1, self.foo1)
@@ -324,7 +324,7 @@
                 durl2 = self.dcs[2].get_depot_url()
                 self.pkgsend_bulk(durl2, self.bar1)
 
-                self.image_create(durl1, prefix = "test1")
+                self.image_create(durl1, prefix="test1")
                 self.pkg("set-publisher -O " + durl2 + " test2")
 
         def tearDown(self):
--- a/src/tests/cli/t_pkg_refresh.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_refresh.py	Fri Oct 23 17:43:37 2009 -0500
@@ -32,8 +32,11 @@
 import re
 import shutil
 import tempfile
+import time
 import unittest
 
+import pkg.catalog as catalog
+
 class TestPkgRefreshMulti(testutils.ManyDepotTestCase):
 
         foo1 = """
@@ -61,10 +64,12 @@
             close """
 
         def setUp(self):
-                testutils.ManyDepotTestCase.setUp(self, 2)
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2",
+                    "test1"])
 
                 self.durl1 = self.dcs[1].get_depot_url()
                 self.durl2 = self.dcs[2].get_depot_url()
+                self.durl3 = self.dcs[3].get_depot_url()
 
         def reduce_spaces(self, string):
                 """Reduce runs of spaces down to a single space."""
@@ -77,6 +82,56 @@
                         expected.splitlines(), actual.splitlines(),
                         "Expected output", "Actual output", lineterm="")))
 
+        def get_op_entries(self, dc, op, op_ver, method="GET"):
+                """Scan logpath for a specific depotcontroller looking for
+                access log entries for an operation.  Returns a list of request
+                URIs for each log entry found for the operation in chronological
+                order."""
+
+                # 127.0.0.1 - - [15/Oct/2009:00:15:38]
+                # "GET /catalog/1/catalog.base.C HTTP/1.1" 200 189 ""
+                # "pkg/b1f63b112bff+ (sunos i86pc; 5.11 snv_122; none; pkg)"
+                entry_comps = [
+                    r"(?P<host>\S+)",
+                    r"\S+",
+                    r"(?P<user>\S+)",
+                    r"\[(?P<request_time>.+)\]",
+                    r'"(?P<request>.+)"',
+                    r"(?P<response_status>[0-9]+)",
+                    r"(?P<content_length>\S+)",
+                    r'"(?P<referer>.*)"',
+                    r'"(?P<user_agent>.*)"',
+                ]
+                log_entry = re.compile(r"\s+".join(entry_comps) + r"\s*\Z")
+
+                logpath = dc.get_logpath()
+                self.debug("check for operation entries in %s" % logpath)
+                logfile = open(logpath, "r")
+                entries = []
+                for line in logfile.readlines():
+                        m = log_entry.search(line)
+                        if not m:
+                                continue
+
+                        host, user, req_time, req, status, clen, ref, agent = \
+                            m.groups()
+
+                        req_method, uri, protocol = req.split(" ")
+                        if req_method != method:
+                                continue
+
+                        req_parts = uri.strip("/").split("/", 3)
+                        if req_parts[0] != op:
+                                continue
+
+                        if req_parts[1] != op_ver:
+                                continue
+                        entries.append(uri)
+                logfile.close()
+                self.debug("Found %s for %s /%s/%s/" % (entries, method, op,
+                    op_ver))
+                return entries
+
         def _check(self, expected, actual):
                 tmp_e = expected.splitlines()
                 tmp_e.sort()
@@ -101,14 +156,14 @@
                 """Test refresh and options."""
 
                 durl = self.dcs[1].get_depot_url()
-                self.image_create(durl)
+                self.image_create(durl, prefix="test1")
 
                 self.pkg("refresh")
                 self.pkg("refresh --full")
                 self.pkg("refresh -F", exit=2)
 
         def test_general_refresh(self):
-                self.image_create(self.durl1, prefix = "test1")
+                self.image_create(self.durl1, prefix="test1")
                 self.pkg("set-publisher -O " + self.durl2 + " test2")
                 self.pkgsend_bulk(self.durl1, self.foo10)
                 self.pkgsend_bulk(self.durl2, self.foo12)
@@ -131,7 +186,7 @@
                 self.checkAnswer(expected, self.output)
 
         def test_specific_refresh(self):
-                self.image_create(self.durl1, prefix = "test1")
+                self.image_create(self.durl1, prefix="test1")
                 self.pkg("set-publisher -O " + self.durl2 + " test2")
                 self.pkgsend_bulk(self.durl1, self.foo10)
                 self.pkgsend_bulk(self.durl2, self.foo12)
@@ -179,42 +234,53 @@
                 self.checkAnswer(expected, self.output)
 
         def test_set_publisher_induces_full_refresh(self):
-                self.pkgsend_bulk(self.durl2, self.foo11)
+                self.pkgsend_bulk(self.durl3, self.foo11)
+                self.pkgsend_bulk(self.durl3, self.foo10)
                 self.pkgsend_bulk(self.durl1, self.foo10)
-                self.image_create(self.durl1, prefix = "test1")
+                self.pkgsend_bulk(self.durl2, self.foo11)
+                self.image_create(self.durl1, prefix="test1")
                 self.pkg("list -aH pkg:/foo")
                 expected = \
                     "foo 1.0-0 known ----\n"
                 self.checkAnswer(expected, self.output)
-                self.pkg("set-publisher --no-refresh -O " +
-                    self.durl2 + " test1")
+
+                # If a privileged user requests this, it should fail since
+                # publisher metadata will have been refreshed, but it will
+                # be the metadata from a repository that does not contain
+                # package metadata for this publisher.
+                self.pkg("set-publisher -O " + self.durl2 + " test1")
+                self.pkg("list --no-refresh -avH pkg:/[email protected]", exit=1)
+                self.pkg("list --no-refresh -avH pkg:/[email protected]", exit=1)
 
                 # If a privileged user requests this, it should succeed since
-                # publisher metadata will automatically be refreshed when asking
-                # for all known packages and [email protected] exists in the new catalog.
-                self.pkg("list -aH pkg:/[email protected]")
-
-                self.pkg("set-publisher -O " + self.durl2 + " test1")
-                self.pkg("list -aH pkg:/foo")
+                # publisher metadata will have been refreshed, and contains
+                # package data for the publisher.
+                self.pkg("set-publisher -O " + self.durl3 + " test1")
+                self.pkg("list --no-refresh -afH pkg:/foo")
                 expected = \
+                    "foo 1.0-0 known u---\n" \
                     "foo 1.1-0 known ----\n"
                 self.checkAnswer(expected, self.output)
+
+                self.pkg("set-publisher --no-refresh -O " +
+                    self.durl3 + " test1")
                 self.pkg("set-publisher -O " + self.durl1 + " test2")
-                self.pkg("list -aH pkg:/foo")
+                self.pkg("list --no-refresh -aH pkg:/foo")
                 expected = \
                     "foo 1.1-0 known ----\n" \
                     "foo (test2) 1.0-0 known ----\n"
 
         def test_set_publisher_induces_delayed_full_refresh(self):
+                self.pkgsend_bulk(self.durl3, self.foo11)
                 self.pkgsend_bulk(self.durl2, self.foo11)
                 self.pkgsend_bulk(self.durl1, self.foo10)
-                self.image_create(self.durl1, prefix = "test1")
+                self.image_create(self.durl1, prefix="test1")
                 self.pkg("list -aH pkg:/foo")
                 expected = \
                     "foo 1.0-0 known ----\n"
                 self.checkAnswer(expected, self.output)
                 self.dcs[2].stop()
-                self.pkg("set-publisher --no-refresh -O " + self.durl2 + " test1")
+                self.pkg("set-publisher --no-refresh -O " + self.durl3 + " test1")
                 self.dcs[2].start()
 
                 # This should fail when listing all known packages, and running
@@ -228,17 +294,28 @@
 
                 # This should succeed when listing all known packages, and
                 # running as a privileged user since the publisher's metadata
-                # will automatically be updated.
+                # will automatically be updated, and the repository contains
+                # package data for the publisher.
                 self.pkg("list -aH pkg:/[email protected]")
                 expected = \
                     "foo 1.1-0 known ----\n"
                 self.checkAnswer(expected, self.output)
 
+                # This should fail when listing all known packages, and
+                # running as a privileged user since the publisher's metadata
+                # will automatically be updated, but the repository doesn't
+                # contain any data for the publisher.
+                self.dcs[2].stop()
+                self.pkg("set-publisher -O " + self.durl1 + " test1")
+                self.pkg("set-publisher --no-refresh -O " + self.durl2 + " test1")
+                self.dcs[2].start()
+                self.pkg("list -aH --no-refresh pkg:/[email protected]", exit=1)
+
         def test_refresh_certificate_problems(self):
                 """Verify that an invalid or inaccessible certificate does not
                 cause unexpected failure."""
 
-                self.image_create(self.durl1)
+                self.image_create(self.durl1, prefix="test1")
 
                 key_fh, key_path = tempfile.mkstemp(dir=self.get_test_prefix())
                 cert_fh, cert_path = tempfile.mkstemp(dir=self.get_test_prefix())
@@ -260,5 +337,198 @@
                 # failure when attempting to refresh.
                 self.pkg("refresh test1", su_wrap=True, exit=1)
 
+        def test_catalog_v1(self):
+                """Verify that refresh works as expected, including in
+                exceptional error cases, for publishers whose repositories
+                offer catalog/1/."""
+
+                dc = self.dcs[1]
+                self.pkgsend_bulk(self.durl1, self.foo10)
+
+                # First, verify that full retrieval works.
+                self.image_create(self.durl1, prefix="test1")
+
+                self.pkg("list -aH pkg:/[email protected]")
+
+                # Only entries for the full catalog files should exist.
+                expected = [
+                    "/catalog/1/catalog.attrs",
+                    "/catalog/1/catalog.base.C"
+                ]
+                returned = self.get_op_entries(dc, "catalog", "1")
+                self.assertEqual(returned, expected)
+
+                # Next, verify that a "normal" incremental update works as
+                # expected when the catalog has changed.
+                self.pkgsend_bulk(self.durl1, self.foo11)
+
+                self.pkg("list -aH")
+                self.pkg("list -aH pkg:/[email protected]")
+                self.pkg("list -aH pkg:/[email protected]", exit=1)
+
+                self.pkg("refresh")
+                self.pkg("list -aH pkg:/[email protected]")
+
+                # A bit hacky, but load the repository's catalog directly
+                # and then get the list of update files it has created.
+                cat_root = os.path.join(dc.get_repodir(), "catalog")
+                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                update = v1_cat.updates.keys()[-1]
+
+                # All of the entries from the previous operations, and then
+                # entries for the catalog attrs file, and one catalog update
+                # file for the incremental update should be returned.
+                expected += [
+                    "/catalog/1/catalog.attrs",
+                    "/catalog/1/%s" % update
+                ]
+                returned = self.get_op_entries(dc, "catalog", "1")
+                self.assertEqual(returned, expected)
+
+                # Next, verify that a "normal" incremental update works as
+                # expected when the catalog hasn't changed.
+                self.pkg("refresh test1")
+
+                # All of the entries from the previous operations, and then
+                # an entry for the catalog attrs file should be returned.
+                expected += [
+                    "/catalog/1/catalog.attrs"
+                ]
+                returned = self.get_op_entries(dc, "catalog", "1")
+                self.assertEqual(returned, expected)
+
+                # Next, verify that a "full" refresh after incrementals works
+                # as expected.
+                self.pkg("refresh --full test1")
+
+                # All of the entries from the previous operations, and then
+                # entries for each part of the catalog should be returned.
+                expected += ["/catalog/1/catalog.attrs"]
+                expected += ["/catalog/1/%s" % p for p in v1_cat.parts.keys()]
+                returned = self.get_op_entries(dc, "catalog", "1")
+                self.assertEqual(returned, expected)
+
+                # Next, verify that rebuilding the repository's catalog induces
+                # a full refresh.  Note that doing this wipes out the contents
+                # of the log so far, so expected needs to be reset and the
+                # catalog reloaded.
+                expected = []
+                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+
+                dc.stop()
+                dc.set_rebuild()
+                dc.start()
+                dc.set_norebuild()
+
+                self.pkg("refresh")
+
+                # The catalog.attrs will be retrieved twice due to the first
+                # request's incremental update failure.
+                expected += ["/catalog/1/catalog.attrs"]
+                expected += ["/catalog/1/catalog.attrs"]
+                expected += ["/catalog/1/%s" % p for p in v1_cat.parts.keys()]
+                returned = self.get_op_entries(dc, "catalog", "1")
+                self.assertEqual(returned, expected)
+
+                # Next, verify that if the client receives an incremental update
+                # but the catalog is then rolled back to an earlier version
+                # (think restoration of repository from backup) that the client
+                # will induce a full refresh.
+
+                # Preserve a copy of the existing repository.
+                tdir = tempfile.mkdtemp(dir=self.get_test_prefix())
+                trpath = os.path.join(tdir, os.path.basename(dc.get_repodir()))
+                shutil.copytree(dc.get_repodir(), trpath)
+
+                # Publish a new package.
+                self.pkgsend_bulk(self.durl1, self.foo12)
+
+                # Refresh to get an incremental update, and verify it worked.
+                self.pkg("refresh")
+                update = v1_cat.updates.keys()[-1]
+                expected += [
+                    "/catalog/1/catalog.attrs",
+                    "/catalog/1/%s" % update
+                ]
+                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+
+                # Stop the depot server and put the old repository data back.
+                dc.stop()
+                shutil.rmtree(dc.get_repodir())
+                shutil.move(trpath, dc.get_repodir())
+                dc.start()
+                expected = []
+                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+
+                # Now verify that a refresh induces a full retrieval.  The
+                # catalog.attrs file will be retrieved twice due to the
+                # failure case.
+                self.pkg("refresh")
+                expected += ["/catalog/1/catalog.attrs"]
+                expected += ["/catalog/1/catalog.attrs"]
+                expected += ["/catalog/1/%s" % p for p in v1_cat.parts.keys()]
+                returned = self.get_op_entries(dc, "catalog", "1")
+                self.assertEqual(returned, expected)
+
+                # Next, verify that if the client receives an incremental update
+                # but the catalog is then rolled back to an earlier version
+                # (think restoration of repository from backup) and then an
+                # update that has already happened before is republished that
+                # the client will induce a full refresh.
+
+                # Preserve a copy of the existing repository.
+                trpath = os.path.join(tdir, os.path.basename(dc.get_repodir()))
+                shutil.copytree(dc.get_repodir(), trpath)
+
+                # Publish a new package.
+                self.pkgsend_bulk(self.durl1, self.foo12)
+                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+
+                # Refresh to get an incremental update, and verify it worked.
+                self.pkg("refresh")
+                update = v1_cat.updates.keys()[-1]
+                expected += [
+                    "/catalog/1/catalog.attrs",
+                    "/catalog/1/%s" % update
+                ]
+                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+
+                # Stop the depot server and put the old repository data back.
+                dc.stop()
+                shutil.rmtree(dc.get_repodir())
+                shutil.move(trpath, dc.get_repodir())
+                dc.start()
+                expected = []
+
+                # Re-publish the new package.  This causes the same catalog
+                # entry to exist, but at a different point in time in the
+                # update logs.
+                self.pkgsend_bulk(self.durl1, self.foo12)
+                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                update = v1_cat.updates.keys()[-1]
+
+                # Now verify that a refresh induces a full retrieval.  The
+                # catalog.attrs file will be retrieved twice due to the
+                # failure case, and a retrieval of the incremental update
+                # file that failed to be applied should also be seen.
+                self.pkg("refresh")
+                expected += [
+                    "/catalog/1/catalog.attrs",
+                    "/catalog/1/%s" % update,
+                    "/catalog/1/catalog.attrs",
+                ]
+                expected += ["/catalog/1/%s" % p for p in v1_cat.parts.keys()]
+                returned = self.get_op_entries(dc, "catalog", "1")
+                self.assertEqual(returned, expected)
+
+                # Finally, purposefully corrupt the catalog.attrs file in the
+                # repository and attempt a refresh.  The client should fail
+                # gracefully.
+                f = open(os.path.join(cat_root, "catalog.attrs"), "wb")
+                f.write("INVALID")
+                f.close()
+                self.pkg("refresh", exit=1)
+
+
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkg_search.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkg_search.py	Fri Oct 23 17:43:37 2009 -0500
@@ -71,7 +71,7 @@
 close """
 
         bogus_pkg10 = """
-set name=fmri value=pkg:/[email protected],5.11-0:20090326T233451Z
+set name=pkg.fmri value=pkg:/[email protected],5.11-0:20090326T233451Z
 set name=description value=""validation with simple chains of constraints ""
 set name=pkg.description value="pseudo-hashes as arrays tied to a "type" (list of fields)"
 depend fmri=XML-Atom-Entry
@@ -125,7 +125,7 @@
             headers,
             "com.sun.service.random_test set       79                        pkg:/[email protected]\n"
         ])
-        
+
         res_remote_keywords = set([
             headers,
             "com.sun.service.keywords set       separator                 pkg:/[email protected]\n"
@@ -139,7 +139,8 @@
         res_remote_wildcard = set([
             headers,
             "basename   file      bin/example_path          pkg:/[email protected]\n",
-            "basename   dir       bin/example_dir           pkg:/[email protected]\n"
+            "basename   dir       bin/example_dir           pkg:/[email protected]\n",
+            "pkg.fmri   set       test/example_pkg          pkg:/[email protected]\n"
         ])
 
         res_remote_glob = set([
@@ -147,7 +148,8 @@
             "basename   file      bin/example_path          pkg:/[email protected]\n",
             "basename   dir       bin/example_dir           pkg:/[email protected]\n",
             "path       file      bin/example_path          pkg:/[email protected]\n",
-            "path       dir       bin/example_dir           pkg:/[email protected]\n"
+            "path       dir       bin/example_dir           pkg:/[email protected]\n",
+            "pkg.fmri   set       test/example_pkg          pkg:/[email protected]\n"
         ])
 
         res_remote_foo = set([
@@ -160,14 +162,10 @@
             "description set       bAr                       pkg:/[email protected]\n"
         ])
 
-        local_fmri_string = \
-            "fmri       set        test/example_pkg              pkg:/[email protected]\n"
-
-
         res_local_pkg = set([
-                headers,
-                local_fmri_string
-                ])
+            headers,
+            "pkg.fmri       set        test/example_pkg              pkg:/[email protected]\n"
+        ])
 
         res_local_path = copy.copy(res_remote_path)
 
@@ -183,10 +181,8 @@
         res_local_keywords = copy.copy(res_remote_keywords)
 
         res_local_wildcard = copy.copy(res_remote_wildcard)
-        res_local_wildcard.add(local_fmri_string)
 
         res_local_glob = copy.copy(res_remote_glob)
-        res_local_glob.add(local_fmri_string)
 
         res_local_foo = copy.copy(res_remote_foo)
         res_local_bar = copy.copy(res_remote_bar)
@@ -215,7 +211,7 @@
 
         res_bogus_name_result = set([
             headers,
-            'fmri       set       bogus_pkg                 pkg:/[email protected]\n'
+            'pkg.fmri       set       bogus_pkg                 pkg:/[email protected]\n'
         ])
 
         res_bogus_number_result = set([
@@ -304,8 +300,8 @@
                     sorted([p.strip().split() for p in proposed_answer]) == \
                     sorted([c.strip().split() for c in correct_answer]):
                         return True
-                print "Proposed Answer: " + str(proposed_answer)
-                print "Correct Answer : " + str(correct_answer)
+                self.debug("Proposed Answer: " + str(proposed_answer))
+                self.debug("Correct Answer : " + str(correct_answer))
                 if isinstance(correct_answer, set) and \
                     isinstance(proposed_answer, set):
                         print >> sys.stderr, "Missing: " + \
@@ -315,7 +311,7 @@
                 self.assert_(correct_answer == proposed_answer)
 
         def _search_op(self, remote, token, test_value, case_sensitive=False,
-            return_actions=True):
+            return_actions=True, exit=0):
                 outfile = os.path.join(self.testdata_dir, "res")
                 if remote:
                         token = "-r " + token
@@ -327,16 +323,14 @@
                         token = "-a " + token
                 else:
                         token = "-p " + token
-                self.pkg("search " + token + " > " + outfile)
+                self.pkg("search " + token + " > " + outfile, exit=exit)
                 res_list = (open(outfile, "rb")).readlines()
                 self._check(set(res_list), test_value)
 
         def _run_remote_tests(self):
-                # Set to 1 since searches can't currently be performed
-                # package name unless it's set inside the
-                # manifest which happens at install time on
-                # the client side.
-                self.pkg("search -a -r example_pkg", exit=1)
+                # This should be possible now that the server automatically adds
+                # FMRIs to manifests (during publication).
+                self.pkg("search -a -r example_pkg")
 
                 self._search_op(True, "example_path", self.res_remote_path)
                 self._search_op(True, "'(example_path)'", self.res_remote_path)
@@ -401,7 +395,7 @@
                 self.pkg("search -a -r 'e* AND <e*>'", exit=1)
                 self.pkg("search -a -r '<e*> OR e*'", exit=1)
                 self.pkg("search -a -r 'e* OR <e*>'", exit=1)
-                
+
         def _run_local_tests(self):
                 outfile = os.path.join(self.testdata_dir, "res")
 
@@ -535,13 +529,13 @@
                 self.pkg("search -r '*'")
                 self.pkg("search -s %s '*'" % durl)
                 self.pkg("search -l '*'", exit=1)
-                
+
         def test_local_0(self):
                 """Install one package, and run the search suite."""
                 # Need to retain that -l works as expected
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                
+
                 self.image_create(durl)
 
                 self.pkg("install example_pkg")
@@ -569,10 +563,12 @@
                 self.dc.set_rebuild()
                 self.dc.start()
 
-                self._search_op(True, "*bogus*",
-                    set(self.res_bogus_name_result))
-                self._search_op(True, "6627937",
-                    set(self.res_bogus_number_result))
+                # Should return nothing, as the server can't build catalog
+                # data for the package since the manifest is unparseable.
+                self._search_op(True, "*bogus*", set(), exit=1)
+                self._search_op(True, "6627937", set(), exit=1)
+
+                # Should fail since the bogus_pkg isn't even in the catalog.
                 self.pkg("install bogus_pkg", exit=1)
 
                 client_pkg_dir = os.path.join(self.img_path, "var", "pkg",
@@ -611,7 +607,7 @@
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.fat_pkg10)
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                
+
                 self.image_create(durl)
 
                 self.pkg("install fat")
@@ -636,7 +632,7 @@
 
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                
+
                 self.image_create(durl)
 
                 self.pkg("install example_pkg")
@@ -663,7 +659,7 @@
 
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                
+
                 self.image_create(durl)
 
                 o_options = "action.name,action.key,pkg.name,pkg.shortfmri," \
@@ -699,7 +695,7 @@
                     self.res_pkg_options_local)
                 self._search_op(False, "%s '<example_path>'" % pkg_options,
                     self.res_pkg_options_local)
-                
+
                 id, tid = self._get_index_dirs()
                 shutil.rmtree(id)
                 self._search_op(False, "-o %s example_path" % o_options,
--- a/src/tests/cli/t_pkgrecv.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkgrecv.py	Fri Oct 23 17:43:37 2009 -0500
@@ -35,7 +35,6 @@
 import pkg.fmri as fmri
 import pkg.manifest as manifest
 import pkg.misc as misc
-import pkg.server.config as config
 import pkg.server.repository as repo
 import pkg.server.repositoryconfig as rc
 import re
@@ -109,13 +108,13 @@
                     "/tmp/copyright2", "/tmp/copyright3",
                     "/tmp/libc.so.1", "/tmp/sh"]
 
-        def setUp(self, ndepots=2, debug_features=None):
+        def setUp(self):
                 """ Start two depots.
                     depot 1 gets foo and moo, depot 2 gets foo and bar
                     depot1 is mapped to publisher test1 (preferred)
                     depot2 is mapped to publisher test2 """
 
-                testutils.ManyDepotTestCase.setUp(self, 2)
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2"])
 
                 for p in self.misc_files:
                         f = open(p, "w")
@@ -152,26 +151,13 @@
                 parts = urlparse.urlparse(uri, "file", allow_fragments=0)
                 path = urllib.url2pathname(parts[2])
 
-                scfg = config.SvrConfig(path, None, None)
                 try:
-                        scfg.init_dirs()
-                except (config.SvrConfigError, EnvironmentError), e:
-                        raise repo.RepositoryError(_("An error occurred while "
-                            "trying to initialize the repository directory "
-                            "structures:\n%s") % e)
-
-                scfg.acquire_in_flight()
-
-                try:
-                        scfg.acquire_catalog()
-                except catalog.CatalogPermissionsException, e:
-                        raise repo.RepositoryError(str(e))
-
-                try:
-                        return repo.Repository(scfg)
-                except rc.InvalidAttributeValueError, e:
-                        raise repo.RepositoryError(_("The specified repository's "
-                            "configuration data is not valid:\n%s") % e)
+                        return repo.Repository(auto_create=False,
+                            fork_allowed=False, repo_root=path)
+                except rc.PropertyError, e:
+                        raise repo.RepositoryError(_("The specified "
+                            "repository's configuration data is not "
+                            "valid:\n%s") % e)
 
         @staticmethod
         def reduceSpaces(string):
@@ -200,15 +186,20 @@
                 # Test help.
                 self.pkgrecv(command="-h", exit=0)
 
+                # Verify that a non-existent repository results in failure.
+                npath = os.path.join(self.get_test_prefix(), "nochance")
+                self.pkgrecv(self.durl1, "-d file://%s foo" % npath,  exit=1)
+
                 # Test list newest.
                 self.pkgrecv(self.durl1, "-n")
                 output = self.reduceSpaces(self.output)
 
-                # The latest version of amber and bronze should be listed.
-                amber = "pkg:/" + self.published[1]
-                scheme = "pkg:/" + self.published[6]
-                bronze = "pkg:/" + self.published[4]
-                tree = "pkg:/" + self.published[5]
+                # The latest versions of amber, scheme, tree, and bronze
+                # should be listed (sans publisher prefix currently).
+                amber = self.published[1].replace("pkg://test1/", "pkg:/")
+                scheme = self.published[6].replace("pkg://test1/", "pkg:/")
+                bronze = self.published[4].replace("pkg://test1/", "pkg:/")
+                tree = self.published[5].replace("pkg://test1/", "pkg:/")
                 expected = "\n".join((amber, scheme, tree, bronze)) + "\n"
                 self.assertEqualDiff(expected, output)
 
@@ -290,6 +281,8 @@
 
                 # Second, pkgrecv to the pkg to a file repository.
                 npath = tempfile.mkdtemp(dir=self.get_test_prefix())
+                self.pkgsend("file://%s" % npath,
+                    "create-repository --set-property publisher.prefix=test1")
                 self.pkgrecv(self.durl1, "-d file://%s %s" % (npath, f))
 
                 # Next, compare the manifests (this will also only succeed if
@@ -298,6 +291,8 @@
                 old = orepo.manifest(f)
                 new = nrepo.manifest(f)
 
+                self.debug(old)
+                self.debug(new)
                 self.assertEqual(misc.get_data_digest(old),
                     misc.get_data_digest(new))
 
@@ -351,13 +346,15 @@
 
                 # Fourth, create an image and verify that the sent package is
                 # seen by the client.
-                self.image_create(self.durl2)
+                self.image_create(self.durl2, prefix="test1")
                 self.pkg("info -r [email protected]")
 
                 # Fifth, pkgrecv the pkg to a file repository and compare the
                 # manifest of a package published with the scheme (pkg:/) given.
                 f = fmri.PkgFmri(self.published[6], None)
                 npath = tempfile.mkdtemp(dir=self.get_test_prefix())
+                self.pkgsend("file://%s" % npath,
+                    "create-repository --set-property publisher.prefix=test1")
                 self.pkgrecv(self.durl1, "-d file://%s %s" % (npath, f))
 
                 # Next, compare the manifests (this will also only succeed if
--- a/src/tests/cli/t_pkgsend.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/t_pkgsend.py	Fri Oct 23 17:43:37 2009 -0500
@@ -204,7 +204,8 @@
 
                 self.dc.stop()
                 rpath = os.path.join(self.get_test_prefix(), "example_repo")
-                self.pkgsend("file://%s" % rpath, "create-repository")
+                self.pkgsend("file://%s" % rpath,
+                    "create-repository --set-property publisher.prefix=test")
 
                 # Now verify that the repository was created by starting the
                 # depot server in readonly mode using the target repository.
@@ -220,10 +221,6 @@
                 self.pkgsend("http://invalid.test1", "create-repository", exit=1)
                 self.pkgsend("https://invalid.test2", "create-repository", exit=1)
 
-                # Finally, verify that specifying extra operands to
-                # create-repository fails as expected.
-                self.pkgsend("https://invalid.test2", "create-repository bobcat", exit=2)
-
         def test_8_bug_7908(self):
                 """Verify that when provided the name of a symbolic link to a
                 file, that publishing will still work as expected."""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/cli/t_publish_api.py	Fri Oct 23 17:43:37 2009 -0500
@@ -0,0 +1,76 @@
+#!/usr/bin/python2.4
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+#
+
+import testutils
+if __name__ == "__main__":
+	testutils.setup_environment("../../../proto")
+
+import os
+import pkg.fmri as fmri
+import pkg.publish.transaction as trans
+import urlparse
+import urllib
+
+class TestPkgPublicationApi(testutils.SingleDepotTestCase):
+        """Various publication tests."""
+
+        # Restart the depot and recreate the repository every test.
+        persistent_depot = False
+
+        def test_stress_http_publish(self):
+                """Publish lots of packages rapidly ensuring that http
+                publication can handle it."""
+
+                durl = self.dc.get_depot_url()
+
+                # Each version number must be unique since multiple packages
+                # will be published within the same second.
+                for i in range(100):
+                        pf = fmri.PkgFmri("foo@%d.0" % i, "5.11")
+                        t = trans.Transaction(durl, pkg_name=str(pf))
+                        t.open()
+                        pkg_fmri, pkg_state = t.close(refresh_index=True)
+                        self.debug("%s: %s" % (pkg_fmri, pkg_state))
+
+        def test_stress_file_publish(self):
+                """Publish lots of packages rapidly ensuring that file
+                publication can handle it."""
+
+                location = self.dc.get_repodir()
+                location = os.path.abspath(location)
+                location = urlparse.urlunparse(("file", "",
+                    urllib.pathname2url(location), "", "", ""))
+
+                # Each version number must be unique since multiple packages
+                # will be published within the same second.
+                for i in range(100):
+                        pf = fmri.PkgFmri("foo@%d.0" % i, "5.11")
+                        t = trans.Transaction(location, pkg_name=str(pf))
+                        t.open()
+                        pkg_fmri, pkg_state = t.close(refresh_index=True)
+                        self.debug("%s: %s" % (pkg_fmri, pkg_state))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/cli/t_util_merge.py	Fri Oct 23 17:43:37 2009 -0500
@@ -0,0 +1,255 @@
+#!/usr/bin/python2.4
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+#
+
+import testutils
+if __name__ == "__main__":
+        testutils.setup_environment("../../../proto")
+
+import difflib
+import os
+import pkg.catalog as catalog
+import pkg.fmri as fmri
+import pkg.manifest as manifest
+import pkg.misc as misc
+import pkg.server.repository as repo
+import pkg.server.repositoryconfig as rc
+import re
+import shutil
+import sys
+import tempfile
+import time
+import urllib
+import urlparse
+import unittest
+import zlib
+
+path_to_pub_util = "../util/publish"
+
+class TestUtilMerge(testutils.ManyDepotTestCase):
+        # Cleanup after every test.
+        persistent_depot = False
+
+        scheme10 = """
+            open pkg:/scheme@1.0,5.11-0
+            close 
+        """
+
+        tree10 = """
+            open tree@1.0,5.11-0
+            close 
+        """
+
+        amber10 = """
+            open amber@1.0,5.11-0
+            add depend fmri=pkg:/tree@1.0 type=require
+            close 
+        """
+
+        amber20 = """
+            open amber@2.0,5.11-0
+            add depend fmri=pkg:/tree@1.0 type=require
+            close 
+        """
+
+        bronze10 = """
+            open bronze@1.0,5.11-0
+            add dir mode=0755 owner=root group=bin path=/usr
+            add dir mode=0755 owner=root group=bin path=/usr/bin
+            add file /tmp/sh mode=0555 owner=root group=bin path=/usr/bin/sh
+            add link path=/usr/bin/jsh target=./sh
+            add hardlink path=/lib/libc.bronze target=/lib/libc.so.1
+            add file /tmp/bronze1 mode=0444 owner=root group=bin path=/etc/bronze1
+            add file /tmp/bronze2 mode=0444 owner=root group=bin path=/etc/bronze2
+            add file /tmp/bronzeA1 mode=0444 owner=root group=bin path=/A/B/C/D/E/F/bronzeA1
+            add depend fmri=pkg:/amber@1.0 type=require
+            add license /tmp/copyright2 license=copyright
+            close
+        """
+
+        bronze20 = """
+            open bronze@2.0,5.11-0
+            add dir mode=0755 owner=root group=bin path=/etc
+            add dir mode=0755 owner=root group=bin path=/lib
+            add file /tmp/sh mode=0555 owner=root group=bin path=/usr/bin/sh
+            add file /tmp/libc.so.1 mode=0555 owner=root group=bin path=/lib/libc.bronze
+            add link path=/usr/bin/jsh target=./sh
+            add hardlink path=/lib/libc.bronze2.0.hardlink target=/lib/libc.so.1
+            add file /tmp/bronze1 mode=0444 owner=root group=bin path=/etc/bronze1
+            add file /tmp/bronze2 mode=0444 owner=root group=bin path=/etc/amber2
+            add license /tmp/copyright3 license=copyright
+            add file /tmp/bronzeA2 mode=0444 owner=root group=bin path=/A1/B2/C3/D4/E5/F6/bronzeA2
+            add depend fmri=pkg:/amber@2.0 type=require
+            close 
+        """
+
+        misc_files = [ "/tmp/bronzeA1",  "/tmp/bronzeA2",
+                    "/tmp/bronze1", "/tmp/bronze2",
+                    "/tmp/copyright2", "/tmp/copyright3",
+                    "/tmp/libc.so.1", "/tmp/sh"]
+
+        def setUp(self):
+                """ Start two depots.
+                    depot 1 gets amber, bronze, tree, and scheme;
+                    depot 2 gets amber and bronze (no tree or scheme).
+                    Both depots use the publisher prefix 'os.org'. """
+
+                testutils.ManyDepotTestCase.setUp(self, ["os.org", "os.org"])
+                for p in self.misc_files:
+                        f = open(p, "w")
+                        # write the name of the file into the file, so that
+                        # all files have differing contents
+                        f.write(p)
+                        f.close()
+                        self.debug("wrote %s" % p)
+
+                # Publish a set of packages to one repository.
+                self.dpath1 = self.dcs[1].get_repodir()
+                self.durl1 = self.dcs[1].get_depot_url()
+                self.published = self.pkgsend_bulk(self.durl1, self.amber10 + \
+                    self.amber20 + self.bronze10 + self.bronze20 + \
+                    self.tree10 + self.scheme10)
+
+                # Ensure timestamps of all successive publications are greater.
+                import time
+                time.sleep(1)
+
+                # Publish the same set to another repository (minus the tree
+                # and scheme packages).
+                self.dpath2 = self.dcs[2].get_repodir()
+                self.durl2 = self.dcs[2].get_depot_url()
+                self.published += self.pkgsend_bulk(self.durl2, self.amber10 + \
+                    self.amber20 + self.bronze10 + self.bronze20)
+
+                self.merge_dir = tempfile.mkdtemp(dir=self.get_test_prefix())
+
+        def tearDown(self):
+                testutils.ManyDepotTestCase.tearDown(self)
+                for p in self.misc_files:
+                        os.remove(p)
+
+        def assertEqualDiff(self, expected, actual):
+                self.assertEqual(expected, actual,
+                    "Actual output differed from expected output.\n" +
+                    "\n".join(difflib.unified_diff(
+                        expected.splitlines(), actual.splitlines(),
+                        "Expected output", "Actual output", lineterm="")))
+
+        @staticmethod
+        def get_repo(uri):
+                parts = urlparse.urlparse(uri, "file", allow_fragments=0)
+                path = urllib.url2pathname(parts[2])
+
+                try:
+                        return repo.Repository(auto_create=False,
+                            fork_allowed=False, repo_root=path)
+                except rc.PropertyError, e:
+                        raise repo.RepositoryError(_("The specified "
+                            "repository's configuration data is not "
+                            "valid:\n%s") % e)
+
+        def merge(self, args=misc.EmptyI, exit=0):
+                prog = os.path.realpath(os.path.join(path_to_pub_util,
+                    "merge.py"))
+                cmd = "%s %s" % (prog, " ".join(args))
+                self.cmdline_run(cmd, exit=exit)
+
+        def test_0_merge(self):
+                """Verify that merge functionality works as expected."""
+
+                pkg_names = set()
+                flist = []
+                for p in self.published:
+                        f = fmri.PkgFmri(p, "5.11")
+                        pkg_names.add(f.pkg_name)
+                        flist.append(f)
+
+                self.merge([
+                    "-r",
+                    "-d %s" % self.merge_dir,
+                    "-v sparc,%s" % self.durl1,
+                    "-v i386,%s" % self.durl2,
+                    "arch",
+                    " ".join(pkg_names),
+                ])
+
+                # Only get the newest FMRIs for each package.
+                flist.sort()
+                nlist = {}
+                for f in reversed(flist):
+                        if f.pkg_name in nlist:
+                                continue
+                        nlist[f.pkg_name] = f
+                nlist = nlist.values()
+
+                def get_expected(f):
+                        exp_lines = ["set name=pkg.fmri value=%s" % f]
+                        for dc in self.dcs.values():
+                                mpath = os.path.join(dc.get_repodir(),
+                                    "pkg", f.get_dir_path())
+
+                                if not os.path.exists(mpath):
+                                        # Not in this repository, check next.
+                                        continue
+
+                                m = open(mpath, "rb")
+                                for l in m:
+                                        if l.find("name=pkg.fmri") > -1:
+                                                continue
+                                        if l.find("name=variant") > -1:
+                                                continue
+                                        if not l.strip():
+                                                continue
+                                        exp_lines.append(l.strip())
+                                m.close()
+
+                        if f.pkg_name in ("tree", "scheme"):
+                                # These packages are only published for sparc.
+                                exp_lines.append("set name=variant.arch value=sparc")
+                        else:
+                                # Everything else is published for all variants.
+                                exp_lines.append("set name=variant.arch value=sparc value=i386")
+                        return "\n".join(sorted(exp_lines))
+
+                # Now load the manifest file for each package and verify that
+                # the merged manifest matches expectations.
+                for f in nlist:
+                        mpath = os.path.join(self.merge_dir, f.pkg_name,
+                            "manifest")
+
+                        m = open(mpath, "rb")
+                        returned = "".join(sorted(l for l in m))
+                        returned = returned.strip()
+                        m.close()
+
+                        # Generate expected and verify.
+                        expected = get_expected(f)
+                        self.assertEqualDiff(expected, returned)
+
+
+if __name__ == "__main__":
+        unittest.main()
+
--- a/src/tests/cli/testutils.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/cli/testutils.py	Fri Oct 23 17:43:37 2009 -0500
@@ -41,6 +41,7 @@
 sys.path.insert(0, path_to_parent)
 
 import pkg5unittest
+from pkg.misc import EmptyI
 
 g_proto_area=""
 
@@ -534,7 +535,7 @@
                                         continue
                                 retcode, published = self.pkgsend(depot_url, line, exit=exit)
                                 if retcode == 0 and published:
-                                        plist.append(published[len("pkg:/"):])
+                                        plist.append(published)
 
                 except (TracebackException, UnexpectedExitCodeException):
                         if os.environ.get("PKG_TRANS_ID", None):
@@ -555,6 +556,50 @@
                         raise UnexpectedExitCodeException(cmdline, exit,
                             retcode, output, debug=self.get_debugbuf())
 
+        def copy_repository(self, src, src_pub, dest, dest_pub):
+                """Copies the packages from the src repository to a new
+                destination repository that will be created at dest.  In
+                addition, any packages from the src_pub will be assigned
+                to the dest_pub during the copy.  The new repository will
+                not have a catalog or search indices, so a depot server
+                pointed at the new repository must be started with the
+                --rebuild option."""
+
+                shutil.rmtree(dest, True)
+                os.makedirs(dest, mode=0755)
+
+                for entry in os.listdir(src):
+                        spath = os.path.join(src, entry)
+
+                        # Skip the catalog, index, and pkg directories
+                        # as they will be copied manually.  Also skip
+                        # any unknown files in the repository directory.
+                        if entry in ("catalog", "index", "pkg") or \
+                            not os.path.isdir(spath):
+                                continue
+                        shutil.copytree(spath, os.path.join(dest, entry))
+
+                # Now copy each manifest and replace any references to the old
+                # publisher with that of the new publisher as they are copied.
+                pkg_root = os.path.join(src, "pkg")
+                for stem in os.listdir(pkg_root):
+                        pkg_path = os.path.join(pkg_root, stem)
+                        for mname in os.listdir(pkg_path):
+                                # Ensure destination manifest directory exists.
+                                dmdpath = os.path.join(dest, "pkg", stem)
+                                if not os.path.isdir(dmdpath):
+                                        os.makedirs(dmdpath, mode=0755)
+
+                                msrc = open(os.path.join(pkg_path, mname), "rb")
+                                mdest = open(os.path.join(dmdpath, mname), "wb")
+                                for l in msrc:
+                                        if l.find("pkg://") > -1:
+                                                mdest.write(l.replace(src_pub,
+                                                    dest_pub))
+                                        else:
+                                                mdest.write(l)
+                                msrc.close()
+                                mdest.close()
 
         def validate_html_file(self, fname, exit=0, comment="",
             options="-quiet -utf8"):
@@ -586,7 +631,7 @@
                 return retcode
 
         def start_depot(self, port, depotdir, logpath, refresh_index=False,
-            debug_features=None):
+            debug_features=EmptyI, properties=EmptyI):
                 """ Convenience routine to help subclasses start
                     depots.  Returns a depotcontroller. """
 
@@ -601,12 +646,14 @@
                 dc = depotcontroller.DepotController()
                 dc.set_depotd_path(g_proto_area + "/usr/lib/pkg.depotd")
                 dc.set_depotd_content_root(g_proto_area + "/usr/share/lib/pkg")
-                if debug_features:
-                        for f in debug_features:
-                                dc.set_debug_feature(f)
+                for f in debug_features:
+                        dc.set_debug_feature(f)
                 dc.set_repodir(depotdir)
                 dc.set_logpath(logpath)
                 dc.set_port(port)
+                for section in properties:
+                        for prop, val in properties[section].iteritems():
+                                dc.set_property(section, prop, val)
                 if refresh_index:
                         dc.set_refresh_index()
                 dc.start()
@@ -615,18 +662,17 @@
 
 class ManyDepotTestCase(CliTestCase):
 
-        def setUp(self, ndepots, debug_features=None):
-                # Note that this must be deferred until after PYTHONPATH
-                # is set up.
-                import pkg.depotcontroller as depotcontroller
-
+        def setUp(self, publishers, debug_features=EmptyI):
                 CliTestCase.setUp(self)
 
                 self.debug("setup: %s" % self.id())
-                self.debug("starting %d depot(s)" % ndepots)
+                self.debug("starting %d depot(s)" % len(publishers))
+                self.debug("publishers: %s" % publishers)
+                self.debug("debug_features: %s" % list(debug_features))
                 self.dcs = {}
 
-                for i in range(1, ndepots + 1):
+                for n, pub in enumerate(publishers):
+                        i = n + 1
                         testdir = os.path.join(self.get_test_prefix(),
                             self.id())
 
@@ -645,9 +691,11 @@
                         depot_logfile = os.path.join(testdir,
                             "depot_logfile%d" % i)
 
+                        props = { "publisher": { "prefix": pub } }
                         self.dcs[i] = self.start_depot(12000 + i,
                             depotdir, depot_logfile,
-                            debug_features=debug_features)
+                            debug_features=debug_features,
+                            properties=props)
 
         def check_traceback(self, logpath):
                 """ Scan logpath looking for tracebacks.
@@ -689,21 +737,19 @@
                         result = self.defaultTestResult()
                 CliTestCase.run(self, result)
 
+
 class SingleDepotTestCase(ManyDepotTestCase):
 
-        def setUp(self, debug_features=None):
-
-                # Note that this must be deferred until after PYTHONPATH
-                # is set up.
-                import pkg.depotcontroller as depotcontroller
-
-                ManyDepotTestCase.setUp(self, 1, debug_features=debug_features)
+        def setUp(self, debug_features=EmptyI, publisher="test"):
+                ManyDepotTestCase.setUp(self, [publisher],
+                    debug_features=debug_features)
                 self.dc = self.dcs[1]
 
         def tearDown(self):
                 ManyDepotTestCase.tearDown(self)
                 self.dc = None
 
+
 class SingleDepotTestCaseCorruptImage(SingleDepotTestCase):
         """ A class which allows manipulation of the image directory that
         SingleDepotTestCase creates. Specifically, it supports removing one
@@ -716,9 +762,10 @@
         for example).
         """
 
-        def setUp(self):
+        def setUp(self, debug_features=EmptyI, publisher="test"):
                 self.backup_img_path = None
-                SingleDepotTestCase.setUp(self)
+                SingleDepotTestCase.setUp(self, debug_features=debug_features,
+                    publisher=publisher)
 
         def tearDown(self):
                 self.__uncorrupt_img_path()
--- a/src/tests/gui/t_pm_rmrepo.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/tests/gui/t_pm_rmrepo.py	Fri Oct 23 17:43:37 2009 -0500
@@ -45,8 +45,8 @@
             open bar@1,5.11-0
             close """
 
-        def setUp(self, ndepots, debug_features=None):
-                testutils.ManyDepotTestCase.setUp(self, 2)
+        def setUp(self):
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2"])
 
                 durl1 = self.dcs[1].get_depot_url()
                 self.pkgsend_bulk(durl1, self.foo1)
@@ -54,7 +54,7 @@
                 durl2 = self.dcs[2].get_depot_url()
                 self.pkgsend_bulk(durl2, self.bar1)
 
-                self.image_create(durl1, prefix = "test1")
+                self.image_create(durl1, prefix="test1")
                 self.pkg("set-publisher -O " + durl2 + " test2")
 
         def tearDown(self):
--- a/src/util/distro-import/importer.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/util/distro-import/importer.py	Fri Oct 23 17:43:37 2009 -0500
@@ -54,7 +54,6 @@
 branch_dict = {}     # 
 create_repo = False  #
 curpkg = None        # which IPS package we're currently importing
-defer_refresh = False
 def_branch = ""      # default branch
 def_repo = "http://localhost:10000"
 def_vers = "0.5.11"  # default package version
@@ -627,7 +626,7 @@
         for a in depend_actions:
                 publish_action(t, pkg, a)
 
-        pkg_fmri, pkg_state = t.close(refresh_index=not defer_refresh)
+        pkg_fmri, pkg_state = t.close(refresh_index=False)
         print "%s: %s\n" % (pkg_fmri, pkg_state)
 
 def search_dicts(path):
@@ -1066,7 +1065,6 @@
                             "(%s:%s)" % (token, lexer.infile, lexer.lineno))
 def main_func():
         global create_repo
-        global defer_refresh
         global def_branch
         global def_repo
         global def_vers
@@ -1212,16 +1210,12 @@
         else:
                 newpkgs = set(pkgdict.values())
 
-        # Indicates whether search indices refresh will be deferred until the end.
-        defer_refresh = False
         # Indicates whether local publishing is active.
         local_publish = False
         if def_repo.startswith("file:"):
-                # If publishing to disk, the search indices should be refreshed at
-                # the end of the publishing process and the feed cache will have to be
-                # generated by starting the depot server using the provided path and
-                # then accessing it.
-                defer_refresh = True
+                # If publishing to disk, the feed cache will have to be
+                # generated by starting the depot server using the provided path
+                # and then accessing it.
                 local_publish = True
 
         processed = 0
@@ -1238,8 +1232,8 @@
                 except trans.TransactionError, _e:
                         print "%s: FAILED: %s\n" % (_p.name, _e)
                         error_count += 1
+                processed += 1
                 if show_debug:
-                        processed += 1
                         print "%d/%d packages processed; %.2f%% complete" % (processed, total,
                             processed * 100.0 / total)
 
@@ -1248,12 +1242,6 @@
                     error_count * 100.0 / total)
                 sys.exit(1)
 
-        if not nopublish and defer_refresh:
-                # This has to be done at the end for some publishing modes.
-                print "Updating search indices..."
-                _t = trans.Transaction(def_repo)
-                _t.refresh_index()
-
         # Ensure that the feed is updated and cached to reflect changes.
         if not nopublish:
                 print "Caching RSS/Atom feed..."
--- a/src/util/publish/merge.py	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/util/publish/merge.py	Fri Oct 23 17:43:37 2009 -0500
@@ -54,7 +54,7 @@
 Usage:
         %s -r [-d dir] [-n] -v varname,url -v varname,url [-v varname,url ...] variant_type  pkgname [pkgname ...]
 
-        example: 
+        example:
 
         %s -r -d /tmp/merge -n -v sparc,http://server1 -v i386,http://server2 arch entire
         """ % (pname(), pname()))
@@ -177,7 +177,7 @@
         # call catalog.recv to pull down catalog
         try:
                 catalog.ServerCatalog.recv(c, dl_dir)
-        except: 
+        except:
                 error(_("Error while reading from: %s") % server_url)
                 sys.exit(1)
 
@@ -186,7 +186,7 @@
 
         # instantiate catalog object
         cat = catalog.ServerCatalog(dl_dir, read_only=True)
-        
+
         # return (catalog, tmpdir path)
         return cat, dl_dir
 
@@ -201,7 +201,7 @@
                         d[f.pkg_name] = [f]
                 for k in d.keys():
                         d[k].sort(reverse = True)
-                catalog_dict[server_url] = d        
+        catalog_dict[server_url] = d
 
 def expand_fmri(server_url, fmri_string, constraint=version.CONSTRAINT_AUTO):
         """ from specified server, find matching fmri using CONSTRAINT_AUTO
@@ -209,7 +209,7 @@
         if server_url not in catalog_dict:
                 load_catalog(server_url)
 
-        fmri = pkg.fmri.PkgFmri(fmri_string, "5.11")        
+        fmri = pkg.fmri.PkgFmri(fmri_string, "5.11")
 
         for f in catalog_dict[server_url].get(fmri.pkg_name, []):
                 if not fmri.version or f.version.is_successor(fmri.version, constraint):
@@ -239,7 +239,7 @@
                                 _get_dependencies(s, server_url, new_fmri)
         return s
 
-        
+
 def main_func():
 
         basedir = None
@@ -253,7 +253,7 @@
         try:
                opts, pargs = getopt.getopt(sys.argv[1:], "d:nrv:")
         except getopt.GetoptError, e:
-                usage(_("Illegal option -- %s") % e.opt) 
+                usage(_("Illegal option -- %s") % e.opt)
 
         varlist = []
         recursive = False
@@ -268,19 +268,19 @@
                         recursive = True
                 if opt == "-n":
                         get_files = False
-                
-                
+
+
         if len(varlist) < 2:
                 usage(_("at least two -v arguments needed to merge"))
-        
+
         if not basedir:
                 basedir = os.getcwd()
 
         server_list = [
-                v.split(",", 1)[1]
-                for v in varlist
-                ]                
-        
+            v.split(",", 1)[1]
+            for v in varlist
+        ]
+
         if len(pargs) == 1:
                 recursive = False
                 overall_set = set()
@@ -292,6 +292,9 @@
         else:
                 fmri_arguments = pargs[1:]
 
+        if not pargs:
+                usage(_("you must specify a variant"))
+
         variant = "variant.%s" % pargs[0]
 
         variant_list = [
@@ -310,7 +313,7 @@
                                         q = str(d).rsplit(":", 1)[0]
                                         overall_set.add(q)
                 fmri_arguments = list(overall_set)
-        
+
         fmri_arguments.sort()
         print "Processing %d packages" % len(fmri_arguments)
 
@@ -340,20 +343,21 @@
                         error("No package of name %s in specified catalogs %s; ignoring." %\
                                       (fmri, server_list))
                         continue
-                        
+
                 merge_fmris(server_list, fmri_list, variant_list, variant, basedir, basename, get_files)
         cleanup_catalogs()
 
         return 0
 
-def merge_fmris(server_list, fmri_list, variant_list, variant, basedir, basename, get_files):
+def merge_fmris(server_list, fmri_list, variant_list, variant, basedir,
+    basename, get_files):
 
         manifest_list = [
                 get_manifest(s, f)
                 for s, f in zip(server_list, fmri_list)
                 ]
 
-        # remove variant tags and package variant metadata 
+        # remove variant tags and package variant metadata
         # from manifests since we're reassigning...
         # this allows merging pre-tagged packages
         for m in manifest_list:
@@ -365,13 +369,34 @@
 
         action_lists = manifest.Manifest.comm(*tuple(manifest_list))
 
+        # set fmri actions require special merge logic.
+        set_fmris = []
+        for l in action_lists:
+                for i, a in enumerate(l):
+                        if not (a.name == "set" and
+                            a.attrs["name"] == "pkg.fmri"):
+                                continue
+
+                        set_fmris.append(a)
+                        del l[i]
+
+        # If set fmris are present, then keep only the most recent one
+        # and add it back to the last action list.
+        if set_fmris:
+                def order(a, b):
+                        f1 = pkg.fmri.PkgFmri(a.attrs["value"], "5.11")
+                        f2 = pkg.fmri.PkgFmri(b.attrs["value"], "5.11")
+                        return cmp(f1, f2)
+                set_fmris.sort(cmp=order)
+                action_lists[-1].insert(0, set_fmris[-1])
+
         for a_list, v in zip(action_lists[0:-1], variant_list):
                 for a in a_list:
                         a.attrs[variant] = v
 
         # combine actions into single list
         allactions = reduce(lambda a,b:a + b, action_lists)
-        
+
         # figure out which variants are actually there for this pkg
         actual_variant_list = [
                 v
@@ -387,15 +412,15 @@
                       ]))))
 
         allactions.sort()
-                                                              
+
         m = manifest.Manifest()
         m.actions = allactions
-        
+
         # urlquote to avoid problems w/ fmris w/ '/' character in name
         basedir = os.path.join(basedir, urllib.quote(basename, ""))
         if not os.path.exists(basedir):
                 os.makedirs(basedir)
-                
+
         m_file = file(os.path.join(basedir, "manifest"), "w")
         m_file.write(m.tostr_unsorted())
         m_file.close()
@@ -408,7 +433,7 @@
         f_file.write(fmri)
         f_file.close()
 
-                
+
         if get_files:
+                # generate list of hashes for each server; last is common
                 already_seen = {}
@@ -430,7 +455,7 @@
                         for action_list in action_lists
                         ]
                 # remove duplicate files (save time)
-                
+
                 for server, hash_set in zip(server_list + [server_list[0]], hash_sets):
                         if len(hash_set) > 0:
                                 fetch_files_byhash(server, hash_set, basedir)
--- a/src/web/en/base.shtml	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/web/en/base.shtml	Fri Oct 23 17:43:37 2009 -0500
@@ -28,7 +28,7 @@
 <%namespace name="shared" file="../shared.shtml" inheritable="True"/>\
 <%page args="g_vars"/>\
 <%
-        CLIENT_API_VERSION = 4
+        CLIENT_API_VERSION = 6
         base = g_vars["base"]
         catalog = api.CatalogInterface(CLIENT_API_VERSION, base)
         config = api.ConfigInterface(CLIENT_API_VERSION, base)
--- a/src/web/en/catalog.shtml	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/web/en/catalog.shtml	Fri Oct 23 17:43:37 2009 -0500
@@ -120,7 +120,7 @@
         if selected_val and selected_match:
                 ver = selected_match
                 pattern = "*,%s-%s" % (ver.build_release, ver.branch)
-                flist = catalog.get_matching_version_fmris(pattern)
+                flist, unmatched = catalog.get_matching_version_fmris(pattern)
         else:
                 flist = [f for f in catalog.fmris()]
                 flist.sort(reverse=True)
@@ -147,7 +147,7 @@
 
                 # Start FMRI entry
                 phref = self.shared.rpath(g_vars, "info/0/%s" % (
-                    urllib.quote(str(pfmri)[len("pkg:/"):], "")))
+                    urllib.quote(str(pfmri), "")))
                 # XXX the .p5i extension is a bogus hack because
                 # packagemanager requires it and shouldn't.
                 p5ihref = self.shared.rpath(g_vars, "p5i/0/%s.p5i" % (
--- a/src/web/en/search.shtml	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/web/en/search.shtml	Fri Oct 23 17:43:37 2009 -0500
@@ -64,6 +64,13 @@
                 except ValueError:
                         val = default
 
+                # Force boolean type for these parameters.
+                if name in ("cs", "sav"):
+                        if val:
+                                val = True
+                        else:
+                                val = False
+
                 criteria[name] = val
 
         return criteria
--- a/src/web/index.shtml	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/web/index.shtml	Fri Oct 23 17:43:37 2009 -0500
@@ -27,7 +27,7 @@
         import pkg.server.api as api
         import pkg.server.api_errors as api_errors
 
-        CLIENT_API_VERSION = 4
+        CLIENT_API_VERSION = 6
 %>\
 <%page args="g_vars"/>\
 <%
--- a/src/web/shared.shtml	Thu Oct 22 19:13:06 2009 +0100
+++ b/src/web/shared.shtml	Fri Oct 23 17:43:37 2009 -0500
@@ -25,10 +25,10 @@
 <%!
         import operator
 %>
-## Returns the value of the named respository configuration attribute in the
+## Returns the value of the named repository configuration property in the
 ## given section.
 <%def name="rcval(g_vars, section, name)"><%
-        return g_vars["config"].get_repo_attr_value(section, name)
+        return g_vars["config"].get_repo_property_value(section, name)
 %></%def>\
 ## Returns the relative URI path to the named resource.
 <%def name="rpath(g_vars, name)"><%
@@ -45,7 +45,7 @@
 <%def name="get_releases(g_vars)"><%
         catalog = g_vars["catalog"]
         request = g_vars["request"]
-        flist = catalog.get_matching_pattern_fmris("entire")
+        flist, unmatched = catalog.get_matching_pattern_fmris("entire")
         versions = {}
         for f in flist:
                 ver_string = "%s,%s-%s" % (f.version.release, f.version.release,