--- a/src/client.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/client.py Wed Jul 01 16:20:01 2015 -0700
@@ -56,6 +56,7 @@
import logging
import os
import re
+ import six
import socket
import sys
import tempfile
@@ -124,7 +125,7 @@
def error(text, cmd=None):
"""Emit an error message prefixed by the command name """
- if not isinstance(text, basestring):
+ if not isinstance(text, six.string_types):
# Assume it's an object that can be stringified.
text = str(text)
@@ -947,7 +948,7 @@
os.chmod(path, 0o644)
tmpfile = os.fdopen(fd, "w+b")
for a in plan.get_release_notes():
- if isinstance(a, unicode):
+ if isinstance(a, six.text_type):
a = a.encode("utf-8")
print(a, file=tmpfile)
tmpfile.close()
@@ -1412,7 +1413,7 @@
raise
if exc_value or exc_tb:
- raise exc_value, None, exc_tb
+ six.reraise(exc_value, None, exc_tb)
return rval
@@ -2202,7 +2203,7 @@
# Configured mediator information
gen_mediators = (
(mediator, mediation)
- for mediator, mediation in api_inst.mediators.iteritems()
+ for mediator, mediation in six.iteritems(api_inst.mediators)
)
# Set minimum widths for mediator and version columns by using the
@@ -3300,7 +3301,7 @@
# Determine if the query returned any results by "peeking" at the first
# value returned from the generator expression.
try:
- found = gen_expr.next()
+ found = next(gen_expr)
except StopIteration:
found = None
actionlist = []
@@ -3661,7 +3662,7 @@
if "Properties" not in pub:
continue
pub_items = sorted(
- pub["Properties"].iteritems())
+ six.iteritems(pub["Properties"]))
property_padding = " "
properties_displayed = False
for k, v in pub_items:
@@ -3670,7 +3671,7 @@
if not properties_displayed:
msg(_(" Properties:"))
properties_displayed = True
- if not isinstance(v, basestring):
+ if not isinstance(v, six.string_types):
v = ", ".join(sorted(v))
msg(property_padding, k + " =", str(v))
return retcode
@@ -4754,9 +4755,9 @@
if long_format:
data = __get_long_history_data(he, output)
for field, value in data:
- if isinstance(field, unicode):
+ if isinstance(field, six.text_type):
field = field.encode(enc)
- if isinstance(value, unicode):
+ if isinstance(value, six.text_type):
value = value.encode(enc)
msg("{0:>18}: {1}".format(field, value))
@@ -4766,7 +4767,7 @@
items = []
for col in columns:
item = output[col]
- if isinstance(item, unicode):
+ if isinstance(item, six.text_type):
item = item.encode(enc)
items.append(item)
msg(history_fmt.format(*items))
--- a/src/depot-config.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/depot-config.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,6 +32,7 @@
import os
import re
import shutil
+import six
import simplejson as json
import socket
import sys
@@ -102,11 +103,7 @@
DEPOT_GROUP = "pkg5srv"
class DepotException(Exception):
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
+ pass
def error(text, cmd=None):
@@ -318,7 +315,7 @@
ssl_cert_chain_file=ssl_cert_chain_file
)
- with file(conf_path, "wb") as conf_file:
+ with open(conf_path, "wb") as conf_file:
conf_file.write(conf_text)
except socket.gaierror as err:
@@ -338,7 +335,7 @@
*DEPOT_VERSIONS_DIRNAME)
misc.makedirs(versions_path)
- with file(os.path.join(versions_path, "index.html"), "w") as \
+ with open(os.path.join(versions_path, "index.html"), "w") as \
versions_file:
versions_file.write(
fragment and DEPOT_FRAGMENT_VERSIONS_STR or
@@ -361,7 +358,7 @@
os.path.sep.join(
[repo_prefix, pub.prefix] + DEPOT_PUB_DIRNAME))
misc.makedirs(pub_path)
- with file(os.path.join(pub_path, "index.html"), "w") as\
+ with open(os.path.join(pub_path, "index.html"), "w") as\
pub_file:
p5i.write(pub_file, [pub])
@@ -369,7 +366,7 @@
pub_path = os.path.join(htdocs_path,
os.path.sep.join([repo_prefix] + DEPOT_PUB_DIRNAME))
os.makedirs(pub_path)
- with file(os.path.join(pub_path, "index.html"), "w") as \
+ with open(os.path.join(pub_path, "index.html"), "w") as \
pub_file:
p5i.write(pub_file, pub_objs)
@@ -383,7 +380,7 @@
status_path = os.path.join(htdocs_path, repo_prefix,
os.path.sep.join(DEPOT_STATUS_DIRNAME), "index.html")
misc.makedirs(os.path.dirname(status_path))
- with file(status_path, "w") as status_file:
+ with open(status_path, "w") as status_file:
status_file.write(json.dumps(status, ensure_ascii=False,
indent=2, sort_keys=True))
except OSError as err:
--- a/src/depot.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/depot.py Wed Jul 01 16:20:01 2015 -0700
@@ -73,16 +73,17 @@
import subprocess
import sys
import tempfile
-import urlparse
from imp import reload
+from six.moves.urllib.parse import urlparse, urlunparse
try:
import cherrypy
version = cherrypy.__version__.split('.')
- if map(int, version) < [3, 1, 0]:
+ # comparison requires operands of the same type, so the map() result must be converted to a list
+ if list(map(int, version)) < [3, 1, 0]:
raise ImportError
- elif map(int, version) >= [3, 2, 0]:
+ elif list(map(int, version)) >= [3, 2, 0]:
raise ImportError
except ImportError:
print("""cherrypy 3.1.0 or greater (but less than """
@@ -387,7 +388,7 @@
# remove any scheme information since we
# don't need it.
scheme, netloc, path, params, query, \
- fragment = urlparse.urlparse(arg,
+ fragment = urlparse(arg,
"http", allow_fragments=0)
if not netloc:
@@ -404,7 +405,7 @@
# Rebuild the url with the sanitized components.
ivalues["pkg"]["proxy_base"] = \
- urlparse.urlunparse((scheme, netloc, path,
+ urlunparse((scheme, netloc, path,
params, query, fragment))
elif opt == "--readonly":
ivalues["pkg"]["readonly"] = True
@@ -694,7 +695,7 @@
# exec-based authentication, so it will have to be decoded first
# to an un-named temporary file.
try:
- with file(ssl_key_file, "rb") as key_file:
+ with open(ssl_key_file, "rb") as key_file:
pkey = crypto.load_privatekey(
crypto.FILETYPE_PEM, key_file.read(),
get_ssl_passphrase)
--- a/src/modules/actions/__init__.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/__init__.py Wed Jul 01 16:20:01 2015 -0700
@@ -89,12 +89,6 @@
class ActionError(Exception):
"""Base exception class for Action errors."""
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
def __str__(self):
raise NotImplementedError()
--- a/src/modules/actions/attribute.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/attribute.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,6 +33,7 @@
import generic
import pkg.fmri
import pkg.actions
+import six
class AttributeAction(generic.Action):
"""Class representing a package attribute."""
@@ -84,7 +85,7 @@
if isinstance(self.attrs["value"], list):
tmp = []
for v in self.attrs["value"]:
- assert isinstance(v, basestring)
+ assert isinstance(v, six.string_types)
if " " in v:
words = v.split()
for w in words:
--- a/src/modules/actions/depend.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/depend.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,6 +33,7 @@
import generic
import re
+import six
import pkg.actions
import pkg.client.pkgdefs as pkgdefs
@@ -318,10 +319,10 @@
# it creating a dummy timestamp. So we have to split it apart
# manually.
#
- if isinstance(pfmris, basestring):
+ if isinstance(pfmris, six.string_types):
pfmris = [pfmris]
inds = []
- pat = re.compile(r"pkg:///|pkg://[^/]*/|pkg:/")
+ pat = re.compile(r"pkg:///|pkg://[^/]*/|pkg:/")
for p in pfmris:
# Strip pkg:/ or pkg:/// from the fmri.
# If fmri has pkg:// then strip the prefix
@@ -418,7 +419,7 @@
# Now build the action output string an attribute at a
# time.
- for k, v in sorted(act.attrs.iteritems(), cmp=cmpkv):
+ for k, v in sorted(six.iteritems(act.attrs), cmp=cmpkv):
# Newline breaks are only forced when there is
# more than one value for an attribute.
if not (isinstance(v, list) or
--- a/src/modules/actions/driver.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/driver.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,9 +32,11 @@
from __future__ import print_function
import os
+import generic
+import six
+
from tempfile import mkstemp
-import generic
import pkg.pkgsubprocess as subprocess
from pkg.client.debugvalues import DebugValues
@@ -206,7 +208,7 @@
# the aliases file. What's left is what we should be
# checking for dups against, along with the rest of the
# drivers.
- for name in driver_actions.iterkeys():
+ for name in driver_actions:
file_db.pop(name, None)
# Build a mapping of aliases to driver names based on
@@ -214,14 +216,14 @@
a2d = {}
for alias, name in (
(a, n)
- for n, act_list in driver_actions.iteritems()
+ for n, act_list in six.iteritems(driver_actions)
for act in act_list
for a in act.attrlist("alias")
):
a2d.setdefault(alias, set()).add(name)
# Enhance that mapping with data from driver_aliases.
- for name, aliases in file_db.iteritems():
+ for name, aliases in six.iteritems(file_db):
for alias in aliases:
a2d.setdefault(alias, set()).add(name)
@@ -353,7 +355,7 @@
if "devlink" in self.attrs:
dlp = os.path.normpath(os.path.join(
image.get_root(), "etc/devlink.tab"))
- dlf = file(dlp)
+ dlf = open(dlp)
dllines = dlf.readlines()
dlf.close()
st = os.stat(dlp)
@@ -421,7 +423,7 @@
image.get_root(), "etc/driver_classes"))
try:
- dcf = file(dcp, "r")
+ dcf = open(dcp, "r")
lines = dcf.readlines()
dcf.close()
except IOError as e:
@@ -440,7 +442,7 @@
self.attrs["name"], i)]
try:
- dcf = file(dcp, "w")
+ dcf = open(dcp, "w")
dcf.writelines(lines)
dcf.close()
except IOError as e:
@@ -465,7 +467,7 @@
image.get_root(), "etc/devlink.tab"))
try:
- dlf = file(dlp)
+ dlf = open(dlp)
lines = dlf.readlines()
dlf.close()
st = os.stat(dlp)
@@ -619,7 +621,7 @@
def __gen_read_binding_file(img, path, minfields=None, maxfields=None,
raw=False):
- myfile = file(os.path.normpath(os.path.join(
+ myfile = open(os.path.normpath(os.path.join(
img.get_root(), path)))
for line in myfile:
line = line.strip()
@@ -754,7 +756,7 @@
# Grab device policy
try:
- dpf = file(os.path.normpath(os.path.join(
+ dpf = open(os.path.normpath(os.path.join(
img.get_root(), "etc/security/device_policy")))
except IOError as e:
e.args += ("etc/security/device_policy",)
@@ -793,7 +795,7 @@
# Grab device privileges
try:
- dpf = file(os.path.normpath(os.path.join(
+ dpf = open(os.path.normpath(os.path.join(
img.get_root(), "etc/security/extra_privs")))
except IOError as e:
e.args += ("etc/security/extra_privs",)
@@ -938,7 +940,7 @@
image.get_root(), "etc/devlink.tab"))
try:
- dlf = file(dlp)
+ dlf = open(dlp)
lines = dlf.readlines()
dlf.close()
st = os.stat(dlp)
--- a/src/modules/actions/generic.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/generic.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,11 +32,13 @@
from cStringIO import StringIO
import errno
import os
+
try:
# Some versions of python don't have these constants.
os.SEEK_SET
except AttributeError:
os.SEEK_SET, os.SEEK_CUR, os.SEEK_END = range(3)
+import six
import stat
import types
@@ -134,7 +136,8 @@
return pkg.actions.fromstr(state)
-class Action(object):
+# metaclass-assignment; pylint: disable=W1623
+class Action(six.with_metaclass(NSG, object)):
"""Class representing a generic packaging object.
An Action is a very simple wrapper around two dictionaries: a named set
@@ -177,6 +180,8 @@
# Most types of actions do not have a payload.
has_payload = False
+ # Python 3 will ignore the __metaclass__ field, but it's still useful
+ # for class attribute access.
__metaclass__ = NSG
# __init__ is provided as a native function (see end of class
@@ -194,7 +199,7 @@
self.data = None
return
- if isinstance(data, basestring):
+ if isinstance(data, six.string_types):
if not os.path.exists(data):
raise pkg.actions.ActionDataError(
_("No such file: '{0}'.").format(data),
@@ -271,7 +276,7 @@
computed. This may need to be done externally.
"""
- sattrs = self.attrs.keys()
+ sattrs = list(self.attrs.keys())
out = self.name
try:
h = self.hash
@@ -415,12 +420,12 @@
# same.
sattrs = self.attrs
oattrs = other.attrs
- sset = set(sattrs.iterkeys())
- oset = set(oattrs.iterkeys())
+ sset = set(six.iterkeys(sattrs))
+ oset = set(six.iterkeys(oattrs))
if sset.symmetric_difference(oset):
return True
- for a, x in sattrs.iteritems():
+ for a, x in six.iteritems(sattrs):
y = oattrs[a]
if x != y:
if len(x) == len(y) and \
@@ -468,7 +473,7 @@
def consolidate_attrs(self):
"""Removes duplicate values from values which are lists."""
- for k in self.attrs.iterkeys():
+ for k in self.attrs:
if isinstance(self.attrs[k], list):
self.attrs[k] = list(set(self.attrs[k]))
@@ -1114,7 +1119,7 @@
for attr in required_attrs:
val = self.attrs.get(attr)
if not val or \
- (isinstance(val, basestring) and not val.strip()):
+ (isinstance(val, six.string_types) and not val.strip()):
errors.append((attr,
_("{0} is required").format(attr)))
--- a/src/modules/actions/group.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/group.py Wed Jul 01 16:20:01 2015 -0700
@@ -31,6 +31,7 @@
a new user."""
import generic
+import six
try:
from pkg.cfgfiles import *
have_cfgfiles = True
@@ -129,7 +130,7 @@
# Get the default values if they're non-empty
grdefval = dict((
(k, v)
- for k, v in gr.getdefaultvalues().iteritems()
+ for k, v in six.iteritems(gr.getdefaultvalues())
if v != ""
))
--- a/src/modules/actions/legacy.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/legacy.py Wed Jul 01 16:20:01 2015 -0700
@@ -112,7 +112,7 @@
# need them) appear.
try:
- file(os.path.join(pkgplan.image.get_root(),
+ open(os.path.join(pkgplan.image.get_root(),
"var/sadm/install/contents"), "a").close()
except IOError as e:
if e.errno != errno.ENOENT:
--- a/src/modules/actions/license.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/license.py Wed Jul 01 16:20:01 2015 -0700
@@ -39,10 +39,10 @@
import pkg.digest as digest
import pkg.misc as misc
import pkg.portable as portable
-import urllib
import zlib
from pkg.client.api_errors import ActionExecutionError
+from six.moves.urllib.parse import quote
class LicenseAction(generic.Action):
"""Class representing a license packaging object."""
@@ -68,7 +68,7 @@
# the path must be relative to the root of the image.
self.attrs["path"] = misc.relpath(os.path.join(
pkgplan.image.get_license_dir(pkgplan.destination_fmri),
- "license." + urllib.quote(self.attrs["license"], "")),
+ "license." + quote(self.attrs["license"], "")),
pkgplan.image.get_root())
def install(self, pkgplan, orig):
@@ -92,7 +92,7 @@
elif os.path.exists(path):
os.chmod(path, misc.PKG_FILE_MODE)
- lfile = file(path, "wb")
+ lfile = open(path, "wb")
try:
hash_attr, hash_val, hash_func = \
digest.get_preferred_hash(self)
@@ -138,7 +138,7 @@
info = []
path = os.path.join(img.get_license_dir(pfmri),
- "license." + urllib.quote(self.attrs["license"], ""))
+ "license." + quote(self.attrs["license"], ""))
hash_attr, hash_val, hash_func = \
digest.get_preferred_hash(self)
@@ -162,7 +162,7 @@
def remove(self, pkgplan):
path = os.path.join(
pkgplan.image.get_license_dir(pkgplan.origin_fmri),
- "license." + urllib.quote(self.attrs["license"], ""))
+ "license." + quote(self.attrs["license"], ""))
try:
# Make file writable so it can be deleted
@@ -238,7 +238,7 @@
# Newer images ensure licenses are stored with encoded
# name so that '/', spaces, etc. are properly handled.
path = os.path.join(img.get_license_dir(pfmri),
- "license." + urllib.quote(self.attrs["license"],
+ "license." + quote(self.attrs["license"],
""))
return path
--- a/src/modules/actions/link.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/link.py Wed Jul 01 16:20:01 2015 -0700
@@ -31,6 +31,7 @@
import errno
import os
+import six
import stat
import generic
@@ -116,7 +117,7 @@
if "mediator" in self.attrs:
rval.extend(
(self.name, k, v, None)
- for k, v in self.attrs.iteritems()
+ for k, v in six.iteritems(self.attrs)
if k.startswith("mediator")
)
return rval
--- a/src/modules/actions/user.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/actions/user.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,6 +32,7 @@
import errno
import generic
+import six
try:
from pkg.cfgfiles import *
have_cfgfiles = True
@@ -242,7 +243,7 @@
# Get the default values if they're non-empty
pwdefval = dict((
(k, v)
- for k, v in pw.getdefaultvalues().iteritems()
+ for k, v in six.iteritems(pw.getdefaultvalues())
if v != ""
))
--- a/src/modules/api_common.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/api_common.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,6 +29,8 @@
"""Contains API functions and classes common to both pkg.client.api and
pkg.server.api."""
+import six
+
import pkg.client.pkgdefs as pkgdefs
import pkg.fmri as fmri
import pkg.misc as misc
@@ -191,10 +193,10 @@
# of an action's modifiers?
if isinstance(modifiers, dict):
modifiers = tuple(
- (k, isinstance(modifiers[k], basestring) and
+ (k, isinstance(modifiers[k], six.string_types) and
tuple([sorted(modifiers[k])]) or
tuple(sorted(modifiers[k])))
- for k in sorted(modifiers.iterkeys())
+ for k in sorted(six.iterkeys(modifiers))
)
return self.attrs.get(name, {modifiers: []}).get(
modifiers, [])
--- a/src/modules/bundle/SolarisPackageDirBundle.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/bundle/SolarisPackageDirBundle.py Wed Jul 01 16:20:01 2015 -0700
@@ -25,14 +25,16 @@
#
import os
+import six
+
import pkg.bundle
import pkg.misc as misc
-from pkg.sysvpkg import SolarisPackage
-from pkg.cpiofile import CpioFile
from pkg.actions import *
from pkg.actions.attribute import AttributeAction
from pkg.actions.legacy import LegacyAction
+from pkg.cpiofile import CpioFile
+from pkg.sysvpkg import SolarisPackage
class SolarisPackageDirBundle(pkg.bundle.Bundle):
@@ -183,7 +185,7 @@
act.attrs["preserve"] = preserve
if act.hash == "NOHASH" and \
- isinstance(data, basestring) and \
+ isinstance(data, six.string_types) and \
data.startswith(self.filename):
act.hash = data[len(self.filename) + 1:]
elif mapline.type in "dx":
@@ -200,7 +202,7 @@
act = license.LicenseAction(data,
license="{0}.copyright".format(self.pkgname))
if act.hash == "NOHASH" and \
- isinstance(data, basestring) and \
+ isinstance(data, six.string_types) and \
data.startswith(self.filename):
act.hash = data[len(self.filename) + 1:]
elif mapline.type == "i":
--- a/src/modules/bundle/__init__.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/bundle/__init__.py Wed Jul 01 16:20:01 2015 -0700
@@ -40,11 +40,7 @@
import sys
class InvalidBundleException(Exception):
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
+ pass
class InvalidOwnershipException(InvalidBundleException):
--- a/src/modules/catalog.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/catalog.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,11 +33,14 @@
import hashlib
import os
import simplejson as json
+import six
import stat
import statvfs
import threading
import types
+from operator import itemgetter
+
import pkg.actions
import pkg.client.api_errors as api_errors
import pkg.client.pkgdefs as pkgdefs
@@ -46,7 +49,6 @@
import pkg.portable as portable
import pkg.version
-from operator import itemgetter
from pkg.misc import EmptyDict, EmptyI
class _JSONWriter(object):
@@ -169,7 +171,7 @@
hash_func=hashlib.sha1)[0]
# Open the JSON file so that the signature data can be added.
- sfile = file(self.pathname, "rb+", self.__bufsz)
+ sfile = open(self.pathname, "rb+", self.__bufsz)
# The last bytes should be "}\n", which is where the signature
# data structure needs to be appended.
@@ -300,7 +302,7 @@
location = os.path.join(self.meta_root, self.name)
try:
- fobj = file(location, "rb")
+ fobj = open(location, "rb")
except EnvironmentError as e:
if e.errno == errno.ENOENT:
raise api_errors.RetrievalError(e,
@@ -559,7 +561,7 @@
entries.setdefault(sver, [])
entries[sver].append((pfmri, entry))
- for key, ver in sorted(versions.iteritems(), key=itemgetter(1)):
+ for key, ver in sorted(six.iteritems(versions), key=itemgetter(1)):
yield ver, entries[key]
def fmris(self, last=False, objects=True, ordered=False, pubs=EmptyI):
@@ -626,7 +628,7 @@
entries.setdefault(sver, [])
entries[sver].append(pfmri)
- for key, ver in sorted(versions.iteritems(), key=itemgetter(1)):
+ for key, ver in sorted(six.iteritems(versions), key=itemgetter(1)):
yield ver, entries[key]
def get_entry(self, pfmri=None, pub=None, stem=None, ver=None):
@@ -1203,7 +1205,7 @@
# Use a copy to prevent the in-memory version from being
# affected by the transformations.
struct = copy.deepcopy(self.__data)
- for key, val in struct.iteritems():
+ for key, val in six.iteritems(struct):
if isinstance(val, datetime.datetime):
# Convert datetime objects to an ISO-8601
# basic format string.
@@ -1240,7 +1242,7 @@
except ValueError:
raise api_errors.InvalidCatalogFile(location)
- for key, val in struct.iteritems():
+ for key, val in six.iteritems(struct):
if key in ("created", "last-modified"):
# Convert ISO-8601 basic format strings to
# datetime objects. These dates can be
@@ -1554,7 +1556,7 @@
parts.append(part)
def merge_entry(src, dest):
- for k, v in src.iteritems():
+ for k, v in six.iteritems(src):
if k == "actions":
dest.setdefault(k, [])
dest[k] += v
@@ -1574,7 +1576,7 @@
# Part doesn't have this FMRI,
# so skip it.
continue
- for k, v in entry.iteritems():
+ for k, v in six.iteritems(entry):
if k == "actions":
mdata.setdefault(k, [])
mdata[k] += v
@@ -1593,7 +1595,7 @@
# Part doesn't have this FMRI,
# so skip it.
continue
- for k, v in entry.iteritems():
+ for k, v in six.iteritems(entry):
if k == "actions":
mdata.setdefault(k, [])
mdata[k] += v
@@ -1794,7 +1796,7 @@
parts[pname] = entries[pname]
logdate = datetime_to_update_ts(op_time)
- for locale, metadata in updates.iteritems():
+ for locale, metadata in six.iteritems(updates):
name = "update.{0}.{1}".format(logdate, locale)
ulog = self.__get_update(name)
ulog.add(pfmri, operation, metadata=metadata,
@@ -1803,7 +1805,7 @@
"last-modified": op_time
}
- for name, part in self.__parts.iteritems():
+ for name, part in six.iteritems(self.__parts):
# Signature data for each part needs to be cleared,
# and will only be available again after save().
attrs.parts[name] = {
@@ -1884,7 +1886,7 @@
attrs = self._attrs
if self.log_updates:
- for name, ulog in self.__updates.iteritems():
+ for name, ulog in six.iteritems(self.__updates):
ulog.save()
# Replace the existing signature data
@@ -1892,13 +1894,13 @@
entry = attrs.updates[name] = {
"last-modified": ulog.last_modified
}
- for n, v in ulog.signatures.iteritems():
+ for n, v in six.iteritems(ulog.signatures):
entry["signature-{0}".format(n)] = v
# Save any CatalogParts that are currently in-memory,
# updating their related information in catalog.attrs
# as they are saved.
- for name, part in self.__parts.iteritems():
+ for name, part in six.iteritems(self.__parts):
# Must save first so that signature data is
# current.
@@ -1915,7 +1917,7 @@
entry = attrs.parts[name] = {
"last-modified": part.last_modified
}
- for n, v in part.signatures.iteritems():
+ for n, v in six.iteritems(part.signatures):
entry["signature-{0}".format(n)] = v
# Finally, save the catalog attributes.
@@ -2151,7 +2153,7 @@
if metadata:
entry["metadata"] = metadata
if manifest:
- for k, v in manifest.signatures.iteritems():
+ for k, v in six.iteritems(manifest.signatures):
entry["signature-{0}".format(k)] = v
part = self.get_part(self.__BASE_PART)
entries[part.name] = part.add(pfmri, metadata=entry,
@@ -2253,7 +2255,7 @@
# (Which is why __get_update is not used.)
ulog = CatalogUpdate(name, meta_root=path)
for pfmri, op_type, op_time, metadata in ulog.updates():
- for pname, pdata in metadata.iteritems():
+ for pname, pdata in six.iteritems(metadata):
part = self.get_part(pname,
must_exist=True)
if part is None:
@@ -2305,7 +2307,7 @@
# signature that matches the new catalog.attrs file.
new_attrs = CatalogAttrs(meta_root=path)
new_sigs = {}
- for name, mdata in new_attrs.parts.iteritems():
+ for name, mdata in six.iteritems(new_attrs.parts):
new_sigs[name] = {}
for key in mdata:
if not key.startswith("signature-"):
@@ -2318,7 +2320,7 @@
self.batch_mode = old_batch_mode
self.finalize()
- for name, part in self.__parts.iteritems():
+ for name, part in six.iteritems(self.__parts):
part.validate(signatures=new_sigs[name])
# Finally, save the catalog, and then copy the new
@@ -2525,7 +2527,7 @@
parts.append(part)
def merge_entry(src, dest):
- for k, v in src.iteritems():
+ for k, v in six.iteritems(src):
if k == "actions":
dest.setdefault(k, [])
dest[k] += v
@@ -2741,7 +2743,7 @@
"""
def merge_entry(src, dest):
- for k, v in src.iteritems():
+ for k, v in six.iteritems(src):
if k == "actions":
dest.setdefault(k, [])
dest[k] += v
@@ -2885,7 +2887,7 @@
raise api_errors.UnknownCatalogEntry(pfmri.get_fmri())
return (
(k.split("signature-")[1], v)
- for k, v in entry.iteritems()
+ for k, v in six.iteritems(entry)
if k.startswith("signature-")
)
@@ -3109,7 +3111,7 @@
# structure sanely somewhere.
mods = frozenset(
(k, frozenset(a.attrlist(k)))
- for k in a.attrs.iterkeys()
+ for k in six.iterkeys(a.attrs)
if k not in ("name", "value")
)
attrs[atname][mods].extend(
@@ -3341,7 +3343,7 @@
# fmri matches.
proposed_dict = {}
for d in ret.values():
- for k, l in d.iteritems():
+ for k, l in six.iteritems(d):
proposed_dict.setdefault(k, []).extend(l)
# construct references so that we can know which pattern
@@ -3534,7 +3536,7 @@
if not last_lm or lm > last_lm:
last_lm = lm
- for name, uattrs in new_attrs.updates.iteritems():
+ for name, uattrs in six.iteritems(new_attrs.updates):
up_lm = uattrs["last-modified"]
# The last component of the update name is the
@@ -3853,7 +3855,7 @@
return None
return sigs
- for name, mdata in self._attrs.parts.iteritems():
+ for name, mdata in six.iteritems(self._attrs.parts):
part = self.get_part(name, must_exist=True)
if part is None:
# Part does not exist; no validation needed.
@@ -3861,7 +3863,7 @@
part.validate(signatures=get_sigs(mdata),
require_signatures=require_signatures)
- for name, mdata in self._attrs.updates.iteritems():
+ for name, mdata in six.iteritems(self._attrs.updates):
ulog = self.__get_update(name, cache=False,
must_exist=True)
if ulog is None:
--- a/src/modules/cfgfiles.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/cfgfiles.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,6 +32,7 @@
import errno
import os
import re
+import six
import stat
import sys
import tempfile
@@ -98,7 +99,7 @@
while self.continuation_lines and line[-2:] == "\\\n":
linecnt += 1
- line += file.next()
+ line += next(file)
line = line.rstrip("\n")
if self.iscommentline(line):
@@ -270,7 +271,7 @@
def getnextuid(self):
"""returns next free system (<=99) uid"""
uids=[]
- for t in self.password_file.index.itervalues():
+ for t in six.itervalues(self.password_file.index):
if t[1]:
uids.append(t[1]["uid"])
for i in range(100):
@@ -324,7 +325,7 @@
def getnextgid(self):
"""returns next free system (<=99) gid"""
gids=[]
- for t in self.index.itervalues():
+ for t in six.itervalues(self.index):
if t[1]:
gids.append(t[1]["gid"])
for i in range(100):
--- a/src/modules/client/actuator.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/actuator.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,6 +32,7 @@
from pkg.client.debugvalues import DebugValues
from pkg.client.imagetypes import IMG_USER, IMG_ENTIRE
+import six
class Actuator(object):
@@ -144,11 +145,15 @@
def timed_out(self):
return self.act_timed_out
+ # To define the "boolness" of a class, Python 2 uses the special
+ # method __nonzero__() while Python 3 uses __bool__(). For Python
+ # 2 and 3 compatibility, define only __bool__() and alias
+ # __nonzero__ = __bool__
def __bool__(self):
- return self.install or self.removal or self.update
+ return bool(self.install) or bool(self.removal) or \
+ bool(self.update)
- def __nonzero__(self):
- return bool(self.install or self.removal or self.update)
+ __nonzero__ = __bool__
# scan_* functions take ActionPlan arguments (see imageplan.py)
def scan_install(self, ap):
@@ -333,15 +338,15 @@
# handle callables first
- for act in self.removal.itervalues():
+ for act in six.itervalues(self.removal):
if hasattr(act, "__call__"):
act()
- for act in self.install.itervalues():
+ for act in six.itervalues(self.install):
if hasattr(act, "__call__"):
act()
- for act in self.update.itervalues():
+ for act in six.itervalues(self.update):
if hasattr(act, "__call__"):
act()
--- a/src/modules/client/api.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/api.py Wed Jul 01 16:20:01 2015 -0700
@@ -64,11 +64,18 @@
import os
import shutil
import simplejson as json
+import six
import sys
import tempfile
import threading
import time
-import urllib
+
+# Pylint seems to panic about six even when it is installed. Instead of using
+# 'disable' here, a better way would be to use ignore-modules in pylintrc, but
+# that option had an issue that was only fixed recently. See pylint/issues/#223.
+# import-error; pylint: disable=F0401
+# no-name-in-module; pylint: disable=E0611
+from six.moves.urllib.parse import unquote
import pkg.catalog as catalog
import pkg.client.api_errors as apx
@@ -362,7 +369,7 @@
override cmdpath when allocating an Image object by setting PKG_CMDPATH
in the environment or by setting simulate_cmdpath in DebugValues.""")
- if isinstance(img_path, basestring):
+ if isinstance(img_path, six.string_types):
# Store this for reset().
self._img_path = img_path
self._img = image.Image(img_path,
@@ -551,7 +558,7 @@
"""
ret = {}
- for m, mvalues in self._img.cfg.mediators.iteritems():
+ for m, mvalues in six.iteritems(self._img.cfg.mediators):
ret[m] = copy.copy(mvalues)
if "version" in ret[m]:
# Don't expose internal Version object to
@@ -836,7 +843,7 @@
def get_avoid_list(self):
"""Return list of tuples of (pkg stem, pkgs w/ group
dependencies on this) """
- return [a for a in self._img.get_avoid_dict().iteritems()]
+ return [a for a in six.iteritems(self._img.get_avoid_dict())]
def gen_facets(self, facet_list, implicit=False, patterns=misc.EmptyI):
"""A generator function that produces tuples of the form:
@@ -1143,7 +1150,7 @@
# re-raise the original exception. (we have to explicitly
# restate the original exception since we may have cleared the
# current exception scope above.)
- raise exc_type, exc_value, exc_traceback
+ six.reraise(exc_type, exc_value, exc_traceback)
def solaris_image(self):
"""Returns True if the current image is a solaris image, or an
@@ -1217,9 +1224,9 @@
# arg name type nullable
"_act_timeout": (int, False),
"_be_activate": (bool, False),
- "_be_name": (basestring, True),
+ "_be_name": (six.string_types, True),
"_backup_be": (bool, True),
- "_backup_be_name": (basestring, True),
+ "_backup_be_name": (six.string_types, True),
"_ignore_missing": (bool, False),
"_ipkg_require_latest": (bool, False),
"_li_erecurse": (iter, True),
@@ -3242,7 +3249,7 @@
ret_pubs = []
for repo_uri in repos:
- if isinstance(repo_uri, basestring):
+ if isinstance(repo_uri, six.string_types):
repo = publisher.RepositoryURI(repo_uri)
else:
# Already a RepositoryURI.
@@ -3446,7 +3453,7 @@
# copy() is too slow here and catalog
# entries are shallow so this should be
# sufficient.
- entry = dict(sentry.iteritems())
+ entry = dict(six.iteritems(sentry))
if not base:
# Nothing else to do except add
# the entry for non-base catalog
@@ -4081,7 +4088,7 @@
# structure sanely somewhere.
mods = tuple(
(k, tuple(sorted(a.attrlist(k))))
- for k in sorted(a.attrs.iterkeys())
+ for k in sorted(six.iterkeys(a.attrs))
if k not in ("name", "value")
)
attrs[atname][mods].extend(atvlist)
@@ -4128,7 +4135,7 @@
# image, elide packages that are not for
# a matching variant value.
is_list = type(atvalue) == list
- for vn, vv in img_variants.iteritems():
+ for vn, vv in six.iteritems(img_variants):
if vn == atname and \
((is_list and
vv not in atvalue) or \
@@ -4718,7 +4725,7 @@
subfields = fields[2].split(None, 2)
pfmri = fmri.PkgFmri(subfields[0])
return pfmri, (query_num, pub, (v, return_type,
- (pfmri, urllib.unquote(subfields[1]),
+ (pfmri, unquote(subfields[1]),
subfields[2])))
elif return_type == Query.RETURN_PACKAGES:
pfmri = fmri.PkgFmri(fields[2])
@@ -4985,7 +4992,7 @@
search is really communicating with a search-enabled server."""
try:
- s = res.next()
+ s = next(res)
return s == Query.VALIDATION_STRING[v]
except StopIteration:
return False
@@ -5196,7 +5203,7 @@
# First, attempt to match the updated publisher object to an
# existing one using the object id that was stored during
# copy().
- for key, old in publishers.iteritems():
+ for key, old in six.iteritems(publishers):
if pub._source_object_id == id(old):
# Store the new publisher's id and the old
# publisher object so it can be restored if the
@@ -5212,7 +5219,7 @@
# Next, be certain that the publisher's prefix and alias
# are not already in use by another publisher.
- for key, old in publishers.iteritems():
+ for key, old in six.iteritems(publishers):
if pub._source_object_id == id(old):
# Don't check the object we're replacing.
continue
@@ -5225,7 +5232,7 @@
# Next, determine what needs updating and add the updated
# publisher.
- for key, old in publishers.iteritems():
+ for key, old in six.iteritems(publishers):
if pub._source_object_id == id(old):
old = orig_pub[-1]
if need_refresh(old, pub):
@@ -5250,7 +5257,7 @@
def cleanup():
new_id, old_pub = orig_pub
- for new_pfx, new_pub in publishers.iteritems():
+ for new_pfx, new_pub in six.iteritems(publishers):
if id(new_pub) == new_id:
publishers[old_pub.prefix] = old_pub
break
--- a/src/modules/client/api_errors.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/api_errors.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,8 +27,9 @@
import errno
import operator
import os
+import six
import xml.parsers.expat as expat
-import urlparse
+from six.moves.urllib.parse import urlsplit
# pkg classes
import pkg.client.pkgdefs as pkgdefs
@@ -42,12 +43,6 @@
Exception.__init__(self)
self.__verbose_info = []
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return unicode(str(self))
-
def add_verbose_info(self, info):
self.__verbose_info.extend(info)
@@ -595,7 +590,7 @@
if self.invalid_mediations:
s = _("The following mediations are not syntactically "
"valid:")
- for m, entries in self.invalid_mediations.iteritems():
+ for m, entries in six.iteritems(self.invalid_mediations):
for value, error in entries.values():
res.append(error)
@@ -777,7 +772,7 @@
if len(pfmris) > 1:
s = _("The following packages deliver conflicting "
"action types to {0}:\n").format(kv)
- for name, pl in ad.iteritems():
+ for name, pl in six.iteritems(ad):
s += "\n {0}:".format(name)
s += "".join("\n {0}".format(p) for p in pl)
s += _("\n\nThese packages may not be installed together. "
@@ -785,7 +780,7 @@
"must be corrected before they can be installed.")
else:
pfmri = pfmris.pop()
- types = list_to_lang(ad.keys())
+ types = list_to_lang(list(ad.keys()))
s = _("The package {pfmri} delivers conflicting "
"action types ({types}) to {kv}").format(**locals())
s += _("\nThis package must be corrected before it "
@@ -808,7 +803,7 @@
def ou(action):
ua = dict(
(k, v)
- for k, v in action.attrs.iteritems()
+ for k, v in six.iteritems(action.attrs)
if ((k in action.unique_attrs and
not (k == "preserve" and "overlay" in action.attrs)) or
((action.name == "link" or action.name == "hardlink") and
@@ -823,7 +818,7 @@
d.setdefault(str(ou(a[0])), set()).add(a[1])
l = sorted([
(len(pkglist), action, pkglist)
- for action, pkglist in d.iteritems()
+ for action, pkglist in six.iteritems(d)
])
s = _("The requested change to the system attempts to install "
@@ -2096,7 +2091,7 @@
unsupported scheme."""
def __init__(self, uris=[]):
- if isinstance(uris, basestring):
+ if isinstance(uris, six.string_types):
uris = [uris]
assert isinstance(uris, (list, tuple, set))
@@ -2107,8 +2102,8 @@
illegals = []
for u in self.uris:
- assert isinstance(u, basestring)
- scheme = urlparse.urlsplit(u,
+ assert isinstance(u, six.string_types)
+ scheme = urlsplit(u,
allow_fragments=0)[0]
illegals.append((u, scheme))
@@ -2145,7 +2140,7 @@
def __str__(self):
if self.data:
- scheme = urlparse.urlsplit(self.data,
+ scheme = urlsplit(self.data,
allow_fragments=0)[0]
return _("The proxy URI '{uri}' uses the unsupported "
"scheme '{scheme}'. Supported schemes are "
--- a/src/modules/client/client_api.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/client_api.py Wed Jul 01 16:20:01 2015 -0700
@@ -34,6 +34,7 @@
import simplejson as json
import os
import re
+import six
import socket
import sys
import tempfile
@@ -42,6 +43,9 @@
import traceback
import jsonschema
+from functools import cmp_to_key
+from six.moves import filter, map, range
+
import pkg
import pkg.actions as actions
import pkg.client.api as api
@@ -81,10 +84,10 @@
if isinstance(input, dict):
return dict([(_byteify(key), _byteify(value)) for key, value in
- input.iteritems()])
+ six.iteritems(input)])
elif isinstance(input, list):
return [_byteify(element) for element in input]
- elif isinstance(input, unicode):
+ elif isinstance(input, six.text_type):
return input.encode('utf-8')
else:
return input
@@ -380,7 +383,7 @@
def _error_json(text, cmd=None, errors_json=None, errorType=None):
"""Prepare an error message for json output. """
- if not isinstance(text, basestring):
+ if not isinstance(text, six.string_types):
# Assume it's an object that can be stringified.
text = str(text)
@@ -1124,7 +1127,7 @@
raise
if exc_value or exc_tb:
- raise exc_value, None, exc_tb
+ six.reraise(type(exc_value), exc_value, exc_tb)
return rval
@@ -2037,8 +2040,8 @@
# Extract our list of headers from the field_data
# dictionary Make sure they are extracted in the
# desired order by using our custom sort function.
- hdrs = map(get_header, sorted(filter(filter_func,
- field_data.values()), sort_fields))
+ hdrs = list(map(get_header, sorted(filter(filter_func,
+ list(field_data.values())), key=cmp_to_key(sort_fields))))
if not omit_headers:
data["headers"] = hdrs
@@ -2119,7 +2122,7 @@
)
entry = []
for e in values:
- if isinstance(e, basestring):
+ if isinstance(e, six.string_types):
entry.append(e)
else:
entry.append(str(e))
@@ -2153,7 +2156,7 @@
)
entry = []
for e in values:
- if isinstance(e, basestring):
+ if isinstance(e, six.string_types):
entry.append(e)
else:
entry.append(str(e))
@@ -2170,7 +2173,7 @@
)
entry = []
for e in values:
- if isinstance(e, basestring):
+ if isinstance(e, six.string_types):
entry.append(e)
else:
entry.append(str(e))
@@ -2263,7 +2266,7 @@
pub_data["Enabled"] = "Yes"
if pub.properties:
pub_data["Properties"] = {}
- for k, v in pub.properties.iteritems():
+ for k, v in six.iteritems(pub.properties):
pub_data["Properties"][k] = v
if "publisher_details" not in data:
data["publisher_details"] = [pub_data]
@@ -2797,18 +2800,18 @@
else:
pargs = json.loads(pargs_json)
if not isinstance(pargs, list):
- if not isinstance(pargs, basestring):
+ if not isinstance(pargs, six.string_types):
err = {"reason": "{0} is invalid.".format(
arg_name)}
errors_json.append(err)
return None, __prepare_json(EXIT_OOPS,
errors=errors_json)
- if isinstance(pargs, unicode):
+ if isinstance(pargs, six.text_type):
pargs = pargs.encode("utf-8")
pargs = [pargs]
else:
for idx in range(len(pargs)):
- if isinstance(pargs[idx], unicode):
+ if isinstance(pargs[idx], six.text_type):
pargs[idx] = pargs[idx].encode("utf-8")
except Exception as e:
err = {"reason": "{0} is invalid.".format(
--- a/src/modules/client/debugvalues.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/debugvalues.py Wed Jul 01 16:20:01 2015 -0700
@@ -20,8 +20,11 @@
# CDDL HEADER END
#
-# Copyright 2009 Sun Microsystems, Inc. All rights reserved.
-# Use is subject to license terms.
+#
+# Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
+#
+
+import six
class Singleton(type):
"""Set __metaclass__ to Singleton to create a singleton.
@@ -39,10 +42,9 @@
return self.instance
-class DebugValues(dict):
+class DebugValues(six.with_metaclass(Singleton, dict)):
"""Singleton dict that returns None if unknown value
is referenced"""
- __metaclass__ = Singleton
def __getitem__(self, item):
""" returns None if not set """
--- a/src/modules/client/firmware.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/firmware.py Wed Jul 01 16:20:01 2015 -0700
@@ -24,6 +24,7 @@
# Copyright (c) 2013, 2015, Oracle and/or its affiliates. All rights reserved.
#
import os.path
+import six
import sys
import pkg.misc as misc
@@ -54,7 +55,7 @@
args = [os.path.join(firmware_dir, firmware_name[len("feature/firmware/"):])]
args.extend([
"{0}={1}".format(k, quote_attr_value(v))
- for k,v in sorted(dep_action.attrs.iteritems())
+ for k,v in sorted(six.iteritems(dep_action.attrs))
if k not in ["type", "root-image", "fmri"]
])
--- a/src/modules/client/history.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/history.py Wed Jul 01 16:20:01 2015 -0700
@@ -28,6 +28,7 @@
import errno
import os
import shutil
+import six
import sys
import traceback
import xml.dom.minidom as xmini
@@ -442,7 +443,7 @@
rpath = os.path.join(self.root_dir,
"notes",
self.operation_release_notes)
- for a in file(rpath, "r"):
+ for a in open(rpath, "r"):
yield a.rstrip()
except Exception as e:
@@ -806,7 +807,7 @@
except (AttributeError, KeyError):
# Failing an exact match, determine if this
# error is a subclass of an existing one.
- for entry, val in error_results.iteritems():
+ for entry, val in six.iteritems(error_results):
if isinstance(error, entry):
result = val
break
@@ -842,7 +843,7 @@
output = traceback.format_exc()
use_current_stack = False
- if isinstance(error, basestring):
+ if isinstance(error, six.string_types):
output = error
elif use_current_stack:
# Assume the current stack is more useful if
@@ -898,7 +899,7 @@
if not self.__snapshot:
return
- for name, val in self.__snapshot.iteritems():
+ for name, val in six.iteritems(self.__snapshot):
if not name.startswith("__"):
object.__setattr__(self, name, val)
self.__operations = self.__snapshot["__operations"]
--- a/src/modules/client/image.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/image.py Wed Jul 01 16:20:01 2015 -0700
@@ -36,15 +36,14 @@
import platform
import shutil
import simplejson as json
+import six
import stat
import sys
import tempfile
import time
-import urllib
from contextlib import contextmanager
-from pkg.client import global_settings
-logger = global_settings.logger
+from six.moves.urllib.parse import quote, unquote
import pkg.actions
import pkg.catalog
@@ -75,6 +74,9 @@
import pkg.smf as smf
import pkg.version
+from pkg.client import global_settings
+logger = global_settings.logger
+
from pkg.client.debugvalues import DebugValues
from pkg.client.imagetypes import IMG_USER, IMG_ENTIRE
from pkg.client.transport.exception import InvalidContentException
@@ -1089,7 +1091,7 @@
"""Find the pkg's installed file named by filepath.
Return the publisher that installed this package."""
- f = file(filepath)
+ f = open(filepath)
try:
flines = f.readlines()
version, pub = flines
@@ -1132,7 +1134,7 @@
installed[f.pkg_name] = f
for pl in os.listdir(installed_state_dir):
- fmristr = "{0}".format(urllib.unquote(pl))
+ fmristr = "{0}".format(unquote(pl))
f = pkg.fmri.PkgFmri(fmristr)
add_installed_entry(f)
@@ -1394,7 +1396,7 @@
# format stores licenses.
dest = os.path.join(tmp_root, "license",
pfmri.get_dir_path(stemonly=True),
- urllib.quote(entry, ""))
+ quote(entry, ""))
misc.makedirs(os.path.dirname(dest))
try:
os.link(src, dest)
@@ -1599,7 +1601,7 @@
if not os.path.isfile(f):
return []
- return [ addslash(l.strip()) for l in file(f) ] + [p]
+ return [ addslash(l.strip()) for l in open(f) ] + [p]
def get_cachedirs(self):
"""Returns a list of tuples of the form (dir, readonly, pub,
@@ -2059,7 +2061,7 @@
def properties(self):
if not self.cfg:
raise apx.ImageCfgEmptyError(self.root)
- return self.cfg.get_index()["property"].keys()
+ return list(self.cfg.get_index()["property"].keys())
def add_publisher(self, pub, refresh_allowed=True, progtrack=None,
approved_cas=EmptyI, revoked_cas=EmptyI, search_after=None,
@@ -3078,7 +3080,7 @@
# copy() is too slow here and catalog entries
# are shallow so this should be sufficient.
- entry = dict(sentry.iteritems())
+ entry = dict(six.iteritems(sentry))
if not base:
# Nothing else to do except add the
# entry for non-base catalog parts.
@@ -3667,7 +3669,7 @@
os.unlink(conflicting_keys_path)
except:
pass
- raise exc_info[0], exc_info[1], exc_info[2]
+ six.reraise(exc_info[0], exc_info[1], exc_info[2])
progtrack.job_add_progress(progtrack.JOB_FAST_LOOKUP)
progtrack.job_done(progtrack.JOB_FAST_LOOKUP)
@@ -3760,7 +3762,7 @@
"""Open the actions file described in _create_fast_lookups() and
return the corresponding file object."""
- sf = file(os.path.join(self.__action_cache_dir,
+ sf = open(os.path.join(self.__action_cache_dir,
"actions.stripped"), "rb")
sversion = sf.readline().rstrip()
stimestamp = sf.readline().rstrip()
@@ -4160,9 +4162,9 @@
stems_and_pats = imageplan.ImagePlan.freeze_pkgs_match(
self, pat_list)
return dict([(s, __make_publisherless_fmri(p))
- for s, p in stems_and_pats.iteritems()])
+ for s, p in six.iteritems(stems_and_pats)])
if dry_run:
- return __calc_frozen().values()
+ return list(__calc_frozen().values())
with self.locked_op("freeze"):
stems_and_pats = __calc_frozen()
# Get existing dictionary of frozen packages.
@@ -4171,9 +4173,9 @@
# comment.
timestamp = calendar.timegm(time.gmtime())
d.update([(s, (str(p), comment, timestamp))
- for s, p in stems_and_pats.iteritems()])
+ for s, p in six.iteritems(stems_and_pats)])
self._freeze_dict_save(d)
- return stems_and_pats.values()
+ return list(stems_and_pats.values())
def unfreeze_pkgs(self, pat_list, progtrack, check_cancel, dry_run):
"""Unfreeze the specified packages... use pattern matching on
@@ -4339,8 +4341,8 @@
progtrack.plan_all_start()
# compute dict of changing variants
if variants:
- new = set(variants.iteritems())
- cur = set(self.cfg.variants.iteritems())
+ new = set(six.iteritems(variants))
+ cur = set(six.iteritems(self.cfg.variants))
variants = dict(new - cur)
elif facets:
new_facets = self.get_facets()
@@ -4641,7 +4643,7 @@
self.__avoid_set = set()
self.__group_obsolete = set()
if os.path.isfile(state_file):
- version, d = json.load(file(state_file))
+ version, d = json.load(open(state_file))
assert version == self.__AVOID_SET_VERSION
for stem in d:
if d[stem] == "avoid":
@@ -4672,7 +4674,7 @@
state_file = os.path.join(self._statedir, "avoid_set")
tmp_file = os.path.join(self._statedir, "avoid_set.new")
- tf = file(tmp_file, "w")
+ tf = open(tmp_file, "w")
d = dict((a, "avoid") for a in self.__avoid_set)
d.update((a, "obsolete") for a in self.__group_obsolete)
@@ -4713,7 +4715,7 @@
state_file = os.path.join(self._statedir, "frozen_dict")
if os.path.isfile(state_file):
try:
- version, d = json.load(file(state_file))
+ version, d = json.load(open(state_file))
except EnvironmentError as e:
raise apx._convert_error(e)
except ValueError as e:
--- a/src/modules/client/imageconfig.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/imageconfig.py Wed Jul 01 16:20:01 2015 -0700
@@ -28,10 +28,9 @@
import os.path
import platform
import re
-import urllib
+import six
-from pkg.client import global_settings
-logger = global_settings.logger
+from six.moves.urllib.parse import quote, unquote
import pkg.client.api_errors as apx
import pkg.client.publisher as publisher
@@ -45,6 +44,9 @@
import pkg.smf as smf
import pkg.variant as variant
+from pkg.client import global_settings
+logger = global_settings.logger
+
from pkg.misc import DictProperty, SIGNATURE_POLICY
from pkg.client.debugvalues import DebugValues
from pkg.client.transport.exception import TransportFailures
@@ -346,15 +348,15 @@
def __publisher_iteritems(self):
"""Support iteritems on publishers"""
- return self.__publishers.iteritems()
+ return six.iteritems(self.__publishers)
def __publisher_keys(self):
"""Support keys() on publishers"""
- return self.__publishers.keys()
+ return list(self.__publishers.keys())
def __publisher_values(self):
"""Support values() on publishers"""
- return self.__publishers.values()
+ return list(self.__publishers.values())
def get_policy(self, policy):
"""Return a boolean value for the named policy. Returns
@@ -422,12 +424,12 @@
idx = self.get_index()
self.variants.update(idx.get("variant", {}))
# facets are encoded so they can contain '/' characters.
- for k, v in idx.get("facet", {}).iteritems():
+ for k, v in six.iteritems(idx.get("facet", {})):
# convert facet name from unicode to a string
- self.facets[str(urllib.unquote(k))] = v
- for k, v in idx.get("inherited_facet", {}).iteritems():
+ self.facets[str(unquote(k))] = v
+ for k, v in six.iteritems(idx.get("inherited_facet", {})):
# convert facet name from unicode to a string
- self.facets._set_inherited(str(urllib.unquote(k)), v)
+ self.facets._set_inherited(str(unquote(k)), v)
# Ensure architecture and zone variants are defined.
if "variant.arch" not in self.variants:
@@ -436,7 +438,7 @@
self.variants["variant.opensolaris.zone"] = "global"
# load linked image child properties
- for s, v in idx.iteritems():
+ for s, v in six.iteritems(idx):
if not re.match("linked_.*", s):
continue
linked_props = self.read_linked(s, v)
@@ -459,14 +461,14 @@
# Get updated configuration index.
idx = self.get_index()
- for s, v in idx.iteritems():
+ for s, v in six.iteritems(idx):
if re.match("authority_.*", s):
k, a = self.read_publisher(s, v)
self.publishers[k] = a
# Move any properties found in policy section (from older
# images) to the property section.
- for k, v in idx.get("policy", {}).iteritems():
+ for k, v in six.iteritems(idx.get("policy", {})):
self.set_property("property", k, v)
self.remove_property("policy", k)
@@ -491,7 +493,7 @@
self.set_property("property", "publisher-search-order", pso)
# Load mediator data.
- for entry, value in idx.get("mediators", {}).iteritems():
+ for entry, value in six.iteritems(idx.get("mediators", {})):
mname, mtype = entry.rsplit(".", 1)
# convert mediator name+type from unicode to a string
mname = str(mname)
@@ -568,7 +570,7 @@
# save local facets
for f in self.facets.local:
self.set_property("facet",
- urllib.quote(f, ""), self.facets.local[f])
+ quote(f, ""), self.facets.local[f])
try:
self.remove_section("inherited_facet")
@@ -577,14 +579,14 @@
# save inherited facets
for f in self.facets.inherited:
self.set_property("inherited_facet",
- urllib.quote(f, ""), self.facets.inherited[f])
+ quote(f, ""), self.facets.inherited[f])
try:
self.remove_section("mediators")
except cfg.UnknownSectionError:
pass
- for mname, mvalues in self.mediators.iteritems():
- for mtype, mvalue in mvalues.iteritems():
+ for mname, mvalues in six.iteritems(self.mediators):
+ for mtype, mvalue in six.iteritems(mvalues):
# name.implementation[-(source|version)]
# name.version[-source]
pname = mname + "." + mtype
@@ -592,7 +594,7 @@
# remove all linked image child configuration
idx = self.get_index()
- for s, v in idx.iteritems():
+ for s, v in six.iteritems(idx):
if not re.match("linked_.*", s):
continue
self.remove_section(s)
@@ -721,7 +723,7 @@
# the existing configuration.
secobj.remove_property(pname)
- for key, val in pub.properties.iteritems():
+ for key, val in six.iteritems(pub.properties):
if val == DEF_TOKEN:
continue
self.set_property(section,
@@ -872,7 +874,7 @@
{"uri": uri, "proxy": proxy})
props = {}
- for k, v in sec_idx.iteritems():
+ for k, v in six.iteritems(sec_idx):
if not k.startswith("property."):
continue
prop_name = k[len("property."):]
@@ -886,7 +888,7 @@
# Load repository data.
repo_data = {}
- for key, val in sec_idx.iteritems():
+ for key, val in six.iteritems(sec_idx):
if key.startswith("repo."):
pname = key[len("repo."):]
repo_data[pname] = val
@@ -1038,7 +1040,7 @@
def set_properties(self, properties):
"""Set multiple properties at one time."""
- if properties.keys() != ["property"]:
+ if list(properties.keys()) != ["property"]:
raise NotImplementedError
props = properties["property"]
if not all(k in self.__supported_props for k in props):
@@ -1273,8 +1275,8 @@
set(sys_cfg.publishers):
pold = old_sysconfig.publishers[prefix]
pnew = sys_cfg.publishers[prefix]
- if map(str, pold.repository.origins) != \
- map(str, pnew.repository.origins):
+ if list(map(str, pold.repository.origins)) != \
+ list(map(str, pnew.repository.origins)):
modified_pubs |= set([prefix])
if proxy_url:
@@ -1569,15 +1571,15 @@
def __publisher_iteritems(self):
"""Support iteritems on publishers"""
- return self.__publishers.iteritems()
+ return six.iteritems(self.__publishers)
def __publisher_keys(self):
"""Support keys() on publishers"""
- return self.__publishers.keys()
+ return list(self.__publishers.keys())
def __publisher_values(self):
"""Support values() on publishers"""
- return self.__publishers.values()
+ return list(self.__publishers.values())
# properties so we can enforce rules and manage two potentially
# overlapping sets of publishers
--- a/src/modules/client/imageplan.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/imageplan.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,6 +33,7 @@
import mmap
import operator
import os
+import six
import stat
import sys
import tempfile
@@ -1474,7 +1475,7 @@
# build the pkg plans, making sure to propose only one repair
# per fmri
- for f, m in set(revert_dirs.keys() + revert_dict.keys()):
+ for f, m in set(list(revert_dirs.keys()) + list(revert_dict.keys())):
needs_delete = revert_dirs[(f, m)]
needs_change = revert_dict[(f, m)]
if not needs_delete and not needs_change:
@@ -1709,7 +1710,7 @@
def __dicts2fmrichanges(olddict, newdict):
return [
(olddict.get(k, None), newdict.get(k, None))
- for k in set(olddict.keys() + newdict.keys())
+ for k in set(list(olddict.keys()) + list(newdict.keys()))
]
def reboot_advised(self):
@@ -2202,7 +2203,7 @@
else:
msg, actions = ret
- if not isinstance(msg, basestring):
+ if not isinstance(msg, six.string_types):
return False
if msg == "nothing":
@@ -2545,7 +2546,7 @@
"""Check all the newly installed actions for conflicts with
existing actions."""
- for key, actions in new.iteritems():
+ for key, actions in six.iteritems(new):
oactions = old.get(key, [])
self.__progtrack.plan_add_progress(
@@ -2595,7 +2596,7 @@
# Ensure that overlay and preserve file semantics are handled
# as expected when conflicts only exist in packages that are
# being removed.
- for key, oactions in old.iteritems():
+ for key, oactions in six.iteritems(old):
self.__progtrack.plan_add_progress(
self.__progtrack.PLAN_ACTION_CONFLICT)
@@ -2627,12 +2628,12 @@
return None
bad_keys = set()
- for ns, key_dict in nsd.iteritems():
+ for ns, key_dict in six.iteritems(nsd):
if type(ns) != int:
type_func = ImagePlan.__check_inconsistent_types
else:
type_func = noop
- for key, actions in key_dict.iteritems():
+ for key, actions in six.iteritems(key_dict):
if len(actions) == 1:
continue
if type_func(actions, []) is not None:
@@ -2717,7 +2718,7 @@
# Group action types by namespace groups
kf = operator.attrgetter("namespace_group")
- types = sorted(pkg.actions.types.itervalues(), key=kf)
+ types = sorted(six.itervalues(pkg.actions.types), key=kf)
namespace_dict = dict(
(ns, list(action_classes))
@@ -2735,7 +2736,7 @@
fmri_dict = weakref.WeakValueDictionary()
# Iterate over action types in namespace groups first; our first
# check should be for action type consistency.
- for ns, action_classes in namespace_dict.iteritems():
+ for ns, action_classes in six.iteritems(namespace_dict):
pt.plan_add_progress(pt.PLAN_ACTION_CONFLICT)
# There's no sense in checking actions which have no
# limits
@@ -2769,8 +2770,8 @@
# cache which could conflict with the new
# actions being installed, or with actions
# already installed, but not getting removed.
- keys = set(itertools.chain(new.iterkeys(),
- old.iterkeys()))
+ keys = set(itertools.chain(six.iterkeys(new),
+ six.iterkeys(old)))
self.__update_act(keys, old, False,
offset_dict, action_classes, msf,
gone_fmris, fmri_dict)
@@ -2779,7 +2780,7 @@
# action cache which are staying on the system,
# and could conflict with the actions being
# installed.
- keys = set(old.iterkeys())
+ keys = set(six.iterkeys(old))
self.__update_act(keys, new, True,
offset_dict, action_classes, msf,
gone_fmris | changing_fmris, fmri_dict)
@@ -3314,7 +3315,7 @@
dir=dpath, prefix="release-notes-")
tmpfile = os.fdopen(fd, "wb")
for note in self.pd.release_notes[1]:
- if isinstance(note, unicode):
+ if isinstance(note, six.text_type):
note = note.encode("utf-8")
print(note, file=tmpfile)
# make file world readable
@@ -3708,7 +3709,7 @@
# mediations provided by the image administrator.
prop_mediators[m] = new_mediation
- for m, new_mediation in prop_mediators.iteritems():
+ for m, new_mediation in six.iteritems(prop_mediators):
# If after processing all mediation data, a source wasn't
# marked for a particular component, mark it as being
# sourced from 'system'.
@@ -3740,8 +3741,8 @@
mediation = cfg_mediators[m]
if any(
k
- for k in set(prop_mediators[m].keys() +
- mediation.keys())
+ for k in set(list(prop_mediators[m].keys()) +
+ list(mediation.keys()))
if prop_mediators[m].get(k) != mediation.get(k)):
# Mediation has changed.
break
@@ -4597,7 +4598,7 @@
root = os.path.normpath(self.image.root)
rzones = zone.list_running_zones()
- for z, path in rzones.iteritems():
+ for z, path in six.iteritems(rzones):
if os.path.normpath(path) == root:
self.pd._actuators.set_zone(z)
# there should be only on zone per path
@@ -4631,7 +4632,7 @@
# This prevents two drivers from ever attempting
# to have the same alias at the same time.
for name, aliases in \
- self.pd._rm_aliases.iteritems():
+ six.iteritems(self.pd._rm_aliases):
driver.DriverAction.remove_aliases(name,
aliases, self.image)
@@ -4725,8 +4726,8 @@
except:
# Ensure the real cause of failure is raised.
pass
- raise api_errors.InvalidPackageErrors([
- exc_value]), None, exc_tb
+ six.reraise(api_errors.InvalidPackageErrors,
+ api_errors.InvalidPackageErrors([exc_value]), exc_tb)
except:
exc_type, exc_value, exc_tb = sys.exc_info()
self.pd.state = plandesc.EXECUTED_ERROR
@@ -4737,7 +4738,7 @@
# This ensures that the original exception and
# traceback are used if exec_fail_actuators
# fails.
- raise exc_value, None, exc_tb
+ six.reraise(type(exc_value), exc_value, exc_tb)
else:
self.pd._actuators.exec_post_actuators(self.image)
@@ -4817,7 +4818,7 @@
def __is_image_empty(self):
try:
- self.image.gen_installed_pkg_names().next()
+ next(self.image.gen_installed_pkg_names())
return False
except StopIteration:
return True
@@ -5238,7 +5239,7 @@
# value.
atvalue = a.attrs["value"]
is_list = type(atvalue) == list
- for vn, vv in variants.iteritems():
+ for vn, vv in six.iteritems(variants):
if vn == atname and \
((is_list and
vv not in atvalue) or \
@@ -5330,13 +5331,13 @@
])))
else:
# single match or wildcard
- for k, pfmris in ret[p].iteritems():
+ for k, pfmris in six.iteritems(ret[p]):
# for each matching package name
matchdict.setdefault(k, []).append(
(p, pfmris))
proposed_dict = {}
- for name, lst in matchdict.iteritems():
+ for name, lst in six.iteritems(matchdict):
nwc_ps = [
(p, set(pfmris))
for p, pfmris in lst
@@ -5479,7 +5480,7 @@
# Rebuild proposed_dict based on latest version of every
# package.
sort_key = operator.attrgetter("version")
- for pname, flist in proposed_dict.iteritems():
+ for pname, flist in six.iteritems(proposed_dict):
# Must sort on version; sorting by FMRI would
# sort by publisher, then by version which is
# not desirable.
@@ -5553,7 +5554,7 @@
# For each fmri, pattern where the pattern matched the fmri
# including the version ...
- for full_fmri, pat in references.iteritems():
+ for full_fmri, pat in six.iteritems(references):
parts = pat.split("@", 1)
# If the pattern doesn't include a version, then add the
# version the package is installed at to the list of
@@ -5591,7 +5592,7 @@
anarchy=True, include_scheme=False), set()).add(p)
# Check whether one stem has been frozen at non-identical
# versions.
- for k, v in stems.iteritems():
+ for k, v in six.iteritems(stems):
if len(v) > 1:
multiversions.append((k, v))
else:
--- a/src/modules/client/indexer.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/indexer.py Wed Jul 01 16:20:01 2015 -0700
@@ -19,8 +19,10 @@
#
# CDDL HEADER END
#
-# Copyright 2009 Sun Microsystems, Inc. All rights reserved.
-# Use is subject to license terms.
+
+#
+# Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
+#
# This Indexer class handles the client-side specific code for hashing the
--- a/src/modules/client/linkedimage/common.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/linkedimage/common.py Wed Jul 01 16:20:01 2015 -0700
@@ -43,11 +43,16 @@
# standard python classes
import collections
import copy
-import itertools
import operator
import os
import select
import simplejson as json
+import six
+
+# Redefining built-in 'reduce', 'zip'; pylint: disable=W0622
+# import-error: six.moves; pylint: disable=F0401
+from functools import reduce
+from six.moves import zip
# pkg classes
import pkg.actions
@@ -66,9 +71,6 @@
import pkg.pkgsubprocess
import pkg.version
-# Redefining built-in; pylint: disable=W0622
-from functools import reduce
-
from pkg.client import global_settings
logger = global_settings.logger
@@ -164,7 +166,7 @@
entries."""
assert(type(rvdict) == dict)
- for k, v in rvdict.iteritems():
+ for k, v in six.iteritems(rvdict):
assert type(k) == LinkedImageName, \
("Unexpected rvdict key: ", k)
_li_rvtuple_check(v)
@@ -515,12 +517,12 @@
# Tell linked image plugins about the updated paths
# Unused variable 'plugin'; pylint: disable=W0612
- for plugin, lip in self.__plugins.iteritems():
+ for plugin, lip in six.iteritems(self.__plugins):
# pylint: enable=W0612
lip.init_root(root)
# Tell linked image children about the updated paths
- for lic in self.__lic_dict.itervalues():
+ for lic in six.itervalues(self.__lic_dict):
lic.child_init_root()
def __update_props(self, props=None):
@@ -657,7 +659,7 @@
# ask each plugin if we're operating in an alternate root
p_transforms = []
- for plugin, lip in self.__plugins.iteritems():
+ for plugin, lip in six.iteritems(self.__plugins):
p_transform = lip.guess_path_transform(
ignore_errors=ignore_errors)
if p_transform is not PATH_TRANSFORM_NONE:
@@ -755,7 +757,7 @@
return None
rv = pkg.facet.Facets()
- for k, v in pfacets.iteritems():
+ for k, v in six.iteritems(pfacets):
# W0212 Access to a protected member
# pylint: disable=W0212
rv._set_inherited(k, v)
@@ -894,7 +896,7 @@
errs = []
# check each property the user specified.
- for k, v in props.iteritems():
+ for k, v in six.iteritems(props):
# did the user specify an allowable property?
if k not in validate_props:
@@ -1429,7 +1431,7 @@
self.set_path_transform(props, path_transform,
current_path=self.__root)
- for k, v in lip.attach_props_def.iteritems():
+ for k, v in six.iteritems(lip.attach_props_def):
if k not in self.__pull_child_props:
# this prop doesn't apply to pull images
continue
@@ -1570,14 +1572,14 @@
p_dicts = [
rvtuple.rvt_p_dict
- for rvtuple in rvdict.itervalues()
+ for rvtuple in six.itervalues(rvdict)
if rvtuple.rvt_p_dict is not None
]
rv_mapped = set()
rv_seen = set([
rvtuple.rvt_rv
- for rvtuple in rvdict.itervalues()
+ for rvtuple in six.itervalues(rvdict)
])
for (rv_map_set, rv_map_rv) in rv_map:
if (rv_seen == rv_map_set):
@@ -1591,7 +1593,7 @@
# if we had errors for unmapped return values, bundle them up
errs = [
rvtuple.rvt_e
- for rvtuple in rvdict.itervalues()
+ for rvtuple in six.itervalues(rvdict)
if rvtuple.rvt_e and rvtuple.rvt_rv not in rv_mapped
]
if len(errs) == 1:
@@ -1755,7 +1757,7 @@
dir_common = os.sep
pdirs = ppath.split(os.sep)[1:-1]
cdirs = cpath.split(os.sep)[1:-1]
- for pdir, cdir in itertools.izip(pdirs, cdirs):
+ for pdir, cdir in zip(pdirs, cdirs):
if pdir != cdir:
break
dir_common = os.path.join(dir_common, pdir)
@@ -1846,7 +1848,7 @@
self.get_path_transform(), current_path=path)
# fill in any missing defaults options
- for k, v in lip.attach_props_def.iteritems():
+ for k, v in six.iteritems(lip.attach_props_def):
if k not in child_props:
child_props[k] = v
@@ -1915,7 +1917,7 @@
list(self.__children_op(
_pkg_op=pkgdefs.PKG_OP_AUDIT_LINKED,
- _lic_list=lic_dict.values(),
+ _lic_list=list(lic_dict.values()),
_rvdict=rvdict,
_progtrack=progress.QuietProgressTracker(),
_failfast=False))
@@ -1942,7 +1944,7 @@
rvdict = {}
list(self.__children_op(
_pkg_op=pkgdefs.PKG_OP_SYNC,
- _lic_list=lic_dict.values(),
+ _lic_list=list(lic_dict.values()),
_rvdict=rvdict,
_progtrack=progtrack,
_failfast=False,
@@ -1988,7 +1990,7 @@
# do the detach
list(self.__children_op(
_pkg_op=pkgdefs.PKG_OP_DETACH,
- _lic_list=lic_dict.values(),
+ _lic_list=list(lic_dict.values()),
_rvdict=rvdict,
_progtrack=progress.NullProgressTracker(),
_failfast=False,
@@ -1998,7 +2000,7 @@
# if any of the children successfully detached, then we want
# to discard our metadata for that child.
- for lin, rvtuple in rvdict.iteritems():
+ for lin, rvtuple in six.iteritems(rvdict):
# if the detach failed leave metadata in parent
if rvtuple.rvt_e and not force:
@@ -2355,7 +2357,7 @@
"""Do a recursive publisher check"""
# get a list of of children to recurse into.
- lic_list = self.__lic_dict.values()
+ lic_list = list(self.__lic_dict.values())
# do a publisher check on all of them
rvdict = {}
@@ -2379,7 +2381,7 @@
pd = self.__img.imageplan.pd
# get a list of of children to recurse into.
- lic_list = self.__lic_dict.values()
+ lic_list = list(self.__lic_dict.values())
# sanity check stage
assert stage in [pkgdefs.API_STAGE_PLAN,
@@ -2591,7 +2593,7 @@
pd.child_op_vectors = []
# Get LinkedImageNames of all children
- lin_list = self.__lic_dict.keys()
+ lin_list = list(self.__lic_dict.keys())
pkg_op_irecurse, pkg_op_erecurse, ignore_syncmd_nop = \
self.__recursion_ops(api_op)
@@ -2680,7 +2682,7 @@
"""Return True if there is no planned work to do on child
image."""
- for lic in self.__lic_dict.itervalues():
+ for lic in six.itervalues(self.__lic_dict):
if lic.child_name not in \
self.__img.imageplan.pd.children_nop:
return False
@@ -3622,12 +3624,12 @@
"""Utility class used when json decoding linked image metadata."""
# Replace unicode keys/values with strings
rvdct = {}
- for k, v in dct.iteritems():
+ for k, v in six.iteritems(dct):
# unicode must die
- if type(k) == unicode:
+ if type(k) == six.text_type:
k = k.encode("utf-8")
- if type(v) == unicode:
+ if type(v) == six.text_type:
v = v.encode("utf-8")
# convert boolean strings values back into booleans
@@ -3644,7 +3646,7 @@
"""Remove a set of keys from a dictionary."""
return dict([
(k, v)
- for k, v in d.iteritems()
+ for k, v in six.iteritems(d)
if k not in keys
])
--- a/src/modules/client/linkedimage/zone.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/linkedimage/zone.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,6 +33,7 @@
# standard python classes
import os
+import six
import tempfile
# pkg classes
@@ -261,7 +262,7 @@
zlist = []
# state is unused
# pylint: disable=W0612
- for zone, (path, state) in zdict.iteritems():
+ for zone, (path, state) in six.iteritems(zdict):
lin = li.LinkedImageName("{0}:{1}".format(self.__pname,
zone))
zlist.append([lin, path])
@@ -330,7 +331,7 @@
assert li.PROP_PATH in props
props[li.PROP_MODEL] = li.PV_MODEL_PUSH
- for k, v in self.attach_props_def.iteritems():
+ for k, v in six.iteritems(self.attach_props_def):
if k not in props:
props[k] = v
@@ -520,7 +521,7 @@
zdict = _list_zones("/", li.PATH_TRANSFORM_NONE)
rzdict = {}
- for z_name, (z_path, z_state) in zdict.iteritems():
+ for z_name, (z_path, z_state) in six.iteritems(zdict):
if z_state == ZONE_STATE_STR_RUNNING:
rzdict[z_name] = z_path
--- a/src/modules/client/pkg_solver.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/pkg_solver.py Wed Jul 01 16:20:01 2015 -0700
@@ -28,6 +28,7 @@
should be installed, updated, or removed to perform a requested operation."""
import operator
+import six
import time
import pkg.actions
@@ -469,7 +470,7 @@
break
# Remove trimmed items from possible_set.
- possible.difference_update(self.__trim_dict.iterkeys())
+ possible.difference_update(six.iterkeys(self.__trim_dict))
def __enforce_unique_packages(self, excludes):
"""Constrain the solver solution so that only one version of
@@ -973,7 +974,7 @@
self.__start_subphase(10)
# remove all trimmed fmris from consideration
- possible_set.difference_update(self.__trim_dict.iterkeys())
+ possible_set.difference_update(six.iterkeys(self.__trim_dict))
# remove any versions from proposed_dict that are in trim_dict
# as trim dict has been updated w/ missing dependencies
try:
@@ -1120,7 +1121,7 @@
self.__start_subphase(4)
# remove all trimmed fmris from consideration
- possible_set.difference_update(self.__trim_dict.iterkeys())
+ possible_set.difference_update(six.iterkeys(self.__trim_dict))
#
# Generate ids, possible_dict for clause generation. Prepare
@@ -1404,7 +1405,7 @@
# assign clause numbers (ids) to possible pkgs
pkgid = 1
- for name in sorted(self.__possible_dict.iterkeys()):
+ for name in sorted(six.iterkeys(self.__possible_dict)):
for fmri in reversed(self.__possible_dict[name]):
self.__id2fmri[pkgid] = fmri
self.__fmri2id[fmri] = pkgid
@@ -2267,7 +2268,7 @@
assert self.__state != SOLVER_INIT
assert DebugValues["plan"]
- return self.__fmri_list_errors(self.__trim_dict.iterkeys(),
+ return self.__fmri_list_errors(six.iterkeys(self.__trim_dict),
already_seen=set(), verbose=True)
def __check_installed(self):
@@ -2751,7 +2752,7 @@
# values that start w/ the relaxed ones...
relaxed_holds |= set([
hold
- for hold in install_holds.itervalues()
+ for hold in six.itervalues(install_holds)
if [ r for r in relaxed_holds if hold.startswith(r + ".") ]
])
@@ -2775,7 +2776,7 @@
# those holds were relaxed.
versioned_dependents -= set([
pkg_name
- for pkg_name, hold_value in install_holds.iteritems()
+ for pkg_name, hold_value in six.iteritems(install_holds)
if hold_value not in relaxed_holds
])
# Build the list of fmris that 1) contain incorp. dependencies
--- a/src/modules/client/pkgplan.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/pkgplan.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,6 +29,7 @@
import itertools
import os
import pwd
+import six
import stat
import pkg.actions
@@ -102,7 +103,7 @@
__state__desc = {
"_autofix_pkgs": [ pkg.fmri.PkgFmri ],
"_license_status": {
- basestring: {
+ six.string_types[0]: {
"src": pkg.actions.generic.NSG,
"dest": pkg.actions.generic.NSG,
},
@@ -426,7 +427,7 @@
entry). Where 'entry' is a dict containing the license status
information."""
- for lic, entry in self._license_status.iteritems():
+ for lic, entry in six.iteritems(self._license_status):
yield lic, entry
def set_license_status(self, plicense, accepted=None, displayed=None):
--- a/src/modules/client/plandesc.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/plandesc.py Wed Jul 01 16:20:01 2015 -0700
@@ -41,6 +41,7 @@
import itertools
import operator
import simplejson as json
+import six
import pkg.actions
import pkg.client.actuator
@@ -510,7 +511,7 @@
vs = []
if self._new_variants:
- vs = self._new_variants.items()
+ vs = list(self._new_variants.items())
# sort results by variant name
vs.sort(key=lambda x: x[0])
@@ -725,7 +726,7 @@
if ordered:
# Sort all the item messages by msg_time
- ordered_list = sorted(self._item_msgs.iteritems(),
+ ordered_list = sorted(six.iteritems(self._item_msgs),
key=lambda k_v: k_v[1][0][0])
for item in ordered_list:
item_id = item[0]
--- a/src/modules/client/printengine.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/printengine.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,10 +33,11 @@
import logging
import os
import re
+import six
+import StringIO
import termios
import time
from abc import ABCMeta, abstractmethod
-import StringIO
from pkg.misc import PipeError
@@ -46,9 +47,8 @@
def __str__(self):
return "PrintEngineException: {0}".format(" ".join(self.args))
-class PrintEngine(object):
+class PrintEngine(six.with_metaclass(ABCMeta, object)):
"""Abstract class defining what a PrintEngine must know how to do."""
- __metaclass__ = ABCMeta
def __init__(self):
pass
@@ -315,13 +315,13 @@
pe.cprint("left to right it should be inverse.")
# Unused variable 'y'; pylint: disable=W0612
for y in range(0, 2):
- for x in xrange(0, 30, 1):
+ for x in range(0, 30, 1):
pe.cprint(" " * x, erase=True, end='')
pe.putp(standout)
pe.cprint("X", end='')
pe.putp(sgr0)
time.sleep(0.050)
- for x in xrange(30, -1, -1):
+ for x in range(30, -1, -1):
pe.cprint(" " * x + "X", erase=True, end='')
time.sleep(0.050)
pe.cprint("", erase=True)
--- a/src/modules/client/progress.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/progress.py Wed Jul 01 16:20:01 2015 -0700
@@ -36,8 +36,12 @@
import math
import sys
import simplejson as json
+import six
import time
from functools import wraps
+# Redefining built-in 'range'; pylint: disable=W0622
+# import-error: six.moves; pylint: disable=F0401
+from six.moves import range
import pkg.client.pkgdefs as pkgdefs
import pkg.client.publisher as publisher
@@ -340,10 +344,14 @@
s += ">"
return s
- def __nonzero__(self):
- return (bool(self.first) or bool(self.last) or
- bool(self.changed))
-
+ # Defining "boolness" of a class, Python 2 uses the special method
+ # called __nonzero__() while Python 3 uses __bool__(). For Python
+ # 2 and 3 compatibility, define __bool__() only, and let
+ # __nonzero__ = __bool__
+ def __bool__(self):
+ return bool(self.first) or bool(self.last) or bool(self.changed)
+
+ __nonzero__ = __bool__
class TrackerItem(object):
"""This class describes an item of interest in tracking progress
@@ -1698,7 +1706,8 @@
# Look in the ProgressTrackerFrontend for a list of frontend
# methods to multiplex.
#
- for methname, m in ProgressTrackerFrontend.__dict__.iteritems():
+ for methname, m in six.iteritems(
+ ProgressTrackerFrontend.__dict__):
if methname == "__init__":
continue
if not inspect.isfunction(m):
@@ -3317,8 +3326,8 @@
hunkmax = 8192
approx_time = 5.0 * fast # how long we want the dl to take
# invent a list of random download chunks.
- for pkgname, filelist in dlscript.iteritems():
- for f in xrange(0, perpkgfiles):
+ for pkgname, filelist in six.iteritems(dlscript):
+ for f in range(0, perpkgfiles):
filesize = random.randint(0, filesizemax)
hunks = []
while filesize > 0:
@@ -3334,7 +3343,7 @@
try:
t.download_set_goal(len(dlscript), pkggoalfiles, pkggoalbytes)
n = 0
- for pkgname, pkgfiles in dlscript.iteritems():
+ for pkgname, pkgfiles in six.iteritems(dlscript):
fmri = pkg.fmri.PkgFmri(pkgname)
t.download_start_pkg(fmri)
for pkgfile in pkgfiles:
@@ -3351,7 +3360,7 @@
t.reset_download()
t.republish_set_goal(len(dlscript), pkggoalbytes, pkggoalbytes)
n = 0
- for pkgname, pkgfiles in dlscript.iteritems():
+ for pkgname, pkgfiles in six.iteritems(dlscript):
fmri = pkg.fmri.PkgFmri(pkgname)
t.republish_start_pkg(fmri)
for pkgfile in pkgfiles:
@@ -3369,7 +3378,7 @@
t.reset_download()
t.archive_set_goal("testarchive", pkggoalfiles, pkggoalbytes)
n = 0
- for pkgname, pkgfiles in dlscript.iteritems():
+ for pkgname, pkgfiles in six.iteritems(dlscript):
for pkgfile in pkgfiles:
for hunk in pkgfile:
t.archive_add_progress(0, hunk)
--- a/src/modules/client/publisher.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/publisher.py Wed Jul 01 16:20:01 2015 -0700
@@ -44,15 +44,14 @@
import os
import pycurl
import shutil
+import six
import tempfile
import time
-import urllib
-import urlparse
import uuid
-from pkg.client import global_settings
-from pkg.client.debugvalues import DebugValues
-logger = global_settings.logger
+from six.moves.urllib.parse import quote, urlsplit, urlparse, urlunparse, \
+ ParseResult
+from six.moves.urllib.request import url2pathname
import pkg.catalog
import pkg.client.api_errors as api_errors
@@ -64,6 +63,9 @@
import pkg.server.catalog as old_catalog
import M2Crypto as m2
+from pkg.client import global_settings
+from pkg.client.debugvalues import DebugValues
+logger = global_settings.logger
from pkg.misc import EmptyDict, EmptyI, SIGNATURE_POLICY, DictProperty, \
PKG_RO_FILE_MODE
@@ -260,7 +262,7 @@
raise api_errors.UnsupportedRepositoryURIAttribute(
"ssl_cert", scheme=self.scheme)
if filename:
- if not isinstance(filename, basestring):
+ if not isinstance(filename, six.string_types):
raise api_errors.BadRepositoryAttributeValue(
"ssl_cert", value=filename)
filename = os.path.normpath(filename)
@@ -273,7 +275,7 @@
raise api_errors.UnsupportedRepositoryURIAttribute(
"ssl_key", scheme=self.scheme)
if filename:
- if not isinstance(filename, basestring):
+ if not isinstance(filename, six.string_types):
raise api_errors.BadRepositoryAttributeValue(
"ssl_key", value=filename)
filename = os.path.normpath(filename)
@@ -306,7 +308,7 @@
# Decompose URI to verify attributes.
scheme, netloc, path, params, query = \
- urlparse.urlsplit(uri, allow_fragments=0)
+ urlsplit(uri, allow_fragments=0)
self.__scheme = scheme.lower()
self.__netloc = netloc
@@ -356,17 +358,17 @@
assert self.__uri
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(self.__uri, allow_fragments=False)
+ urlparse(self.__uri, allow_fragments=False)
if new_scheme == scheme:
return
- self.uri = urlparse.urlunparse(
+ self.uri = urlunparse(
(new_scheme, netloc, path, params, query, fragment))
def get_host(self):
"""Get the host and port of this URI if it's a http uri."""
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(self.__uri, allow_fragments=0)
+ urlparse(self.__uri, allow_fragments=0)
if scheme != "file":
return netloc
return ""
@@ -376,9 +378,9 @@
URI or '' otherwise."""
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(self.__uri, allow_fragments=0)
+ urlparse(self.__uri, allow_fragments=0)
if scheme == "file":
- return urllib.url2pathname(path)
+ return url2pathname(path)
return ""
ssl_cert = property(lambda self: self.__ssl_cert, __set_ssl_cert, None,
@@ -407,7 +409,7 @@
"""The URI scheme."""
if not self.__uri:
return ""
- return urlparse.urlsplit(self.__uri, allow_fragments=0)[0]
+ return urlsplit(self.__uri, allow_fragments=0)[0]
trailing_slash = property(lambda self: self.__trailing_slash,
__set_trailing_slash, None,
@@ -497,7 +499,7 @@
if isinstance(other, TransportRepoURI):
return self.uri == other.uri and \
self.proxy == other.proxy
- if isinstance(other, basestring):
+ if isinstance(other, six.string_types):
return self.uri == other and self.proxy == None
return False
@@ -505,14 +507,14 @@
if isinstance(other, TransportRepoURI):
return self.uri != other.uri or \
self.proxy != other.proxy
- if isinstance(other, basestring):
+ if isinstance(other, six.string_types):
return self.uri != other or self.proxy != None
return True
def __cmp__(self, other):
if not other:
return 1
- if isinstance(other, basestring):
+ if isinstance(other, six.string_types):
other = TransportRepoURI(other)
elif not isinstance(other, TransportRepoURI):
return 1
@@ -1743,7 +1745,7 @@
pubs=[self.prefix]):
pub, stem, ver = t
- entry = dict(sentry.iteritems())
+ entry = dict(six.iteritems(sentry))
try:
npart.add(metadata=entry,
op_time=op_time, pub=pub,
@@ -2602,9 +2604,9 @@
raise api_errors.InvalidResourceLocation(uri.strip())
crl_host = DebugValues.get_value("crl_host")
if crl_host:
- orig = urlparse.urlparse(uri)
- crl = urlparse.urlparse(crl_host)
- uri = urlparse.urlunparse(urlparse.ParseResult(
+ orig = urlparse(uri)
+ crl = urlparse(crl_host)
+ uri = urlunparse(ParseResult(
scheme=crl.scheme, netloc=crl.netloc,
path=orig.path,
params=orig.params, query=orig.params,
@@ -2613,7 +2615,7 @@
# object.
if uri in self.__tmp_crls:
return self.__tmp_crls[uri]
- fn = urllib.quote(uri, "")
+ fn = quote(uri, "")
assert os.path.isdir(self.__crl_root)
fpath = os.path.join(self.__crl_root, fn)
crl = None
@@ -2797,7 +2799,7 @@
# recognized.
check_values(vs)
uses = usages.get(name, [])
- if isinstance(uses, basestring):
+ if isinstance(uses, six.string_types):
uses = [uses]
# For each use, check to see whether it's
# permitted by the certificate's extension
@@ -2836,7 +2838,7 @@
certs_with_problems = []
ca_dict = copy.copy(ca_dict)
- for k, v in self.get_ca_certs().iteritems():
+ for k, v in six.iteritems(self.get_ca_certs()):
if k in ca_dict:
ca_dict[k].extend(v)
else:
@@ -3027,7 +3029,7 @@
if name == SIGNATURE_POLICY:
self.__sig_policy = None
- if isinstance(values, basestring):
+ if isinstance(values, six.string_types):
values = [values]
policy_name = values[0]
if policy_name not in sigpolicy.Policy.policies():
@@ -3061,7 +3063,7 @@
self.__properties[SIGNATURE_POLICY] = policy_name
return
if name == "signature-required-names":
- if isinstance(values, basestring):
+ if isinstance(values, six.string_types):
values = self.__read_list(values)
self.__properties[name] = values
@@ -3078,15 +3080,15 @@
def __prop_iteritems(self):
"""Support iteritems on properties"""
- return self.__properties.iteritems()
+ return six.iteritems(self.__properties)
def __prop_keys(self):
"""Support keys() on properties"""
- return self.__properties.keys()
+ return list(self.__properties.keys())
def __prop_values(self):
"""Support values() on properties"""
- return self.__properties.values()
+ return list(self.__properties.values())
def __prop_getdefault(self, name, value):
"""Support getdefault() on properties"""
@@ -3104,7 +3106,7 @@
def __prop_update(self, d):
"""Support update() on properties"""
- for k, v in d.iteritems():
+ for k, v in six.iteritems(d):
# Must iterate through each value and
# set it this way so that the logic
# in __set_prop is used.
--- a/src/modules/client/sigpolicy.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/sigpolicy.py Wed Jul 01 16:20:01 2015 -0700
@@ -24,6 +24,7 @@
# Copyright (c) 2010, 2015, Oracle and/or its affiliates. All rights reserved.
#
+import six
import pkg.client.api_errors as apx
class Policy(object):
@@ -157,7 +158,7 @@
assert req_names, "RequireNames requires at least one name " \
"to be passed to the constructor."
Policy.__init__(self, *args, **kwargs)
- if isinstance(req_names, basestring):
+ if isinstance(req_names, six.string_types):
req_names = [req_names]
self.required_names = frozenset(req_names)
--- a/src/modules/client/transport/engine.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/transport/engine.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,11 +27,13 @@
from __future__ import division
import errno
-import httplib
import os
import pycurl
+import six
import time
-import urlparse
+
+from six.moves import http_client
+from six.moves.urllib.parse import urlsplit
# Need to ignore SIGPIPE if using pycurl in NOSIGNAL mode.
try:
@@ -235,7 +237,7 @@
url = h.url
uuid = h.uuid
urlstem = h.repourl
- proto = urlparse.urlsplit(url)[0]
+ proto = urlsplit(url)[0]
# When using pipelined operations, libcurl tracks the
# amount of time taken for the entire pipelined request
@@ -332,7 +334,7 @@
url = h.url
uuid = h.uuid
urlstem = h.repourl
- proto = urlparse.urlsplit(url)[0]
+ proto = urlsplit(url)[0]
# When using pipelined operations, libcurl tracks the
# amount of time taken for the entire pipelined request
@@ -380,7 +382,7 @@
respcode = h.getinfo(pycurl.RESPONSE_CODE)
if proto not in response_protocols or \
- respcode == httplib.OK:
+ respcode == http_client.OK:
h.success = True
repostats.clear_consecutive_errors()
success.append(url)
@@ -845,13 +847,13 @@
headerlist = []
# Headers common to all requests
- for k, v in self.__common_header.iteritems():
+ for k, v in six.iteritems(self.__common_header):
headerstr = "{0}: {1}".format(k, v)
headerlist.append(headerstr)
# Headers specific to this request
if treq.header:
- for k, v in treq.header.iteritems():
+ for k, v in six.iteritems(treq.header):
headerstr = "{0}: {1}".format(k, v)
headerlist.append(headerstr)
@@ -908,7 +910,7 @@
hdl.setopt(pycurl.NOPROGRESS, 0)
hdl.setopt(pycurl.PROGRESSFUNCTION, treq.progfunc)
- proto = urlparse.urlsplit(treq.url)[0]
+ proto = urlsplit(treq.url)[0]
if not proto in ("http", "https"):
return
--- a/src/modules/client/transport/exception.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/transport/exception.py Wed Jul 01 16:20:01 2015 -0700
@@ -25,18 +25,19 @@
#
import errno
-import httplib
import pycurl
+from six.moves import http_client
+
+retryable_http_errors = set((http_client.REQUEST_TIMEOUT, http_client.BAD_GATEWAY,
+ http_client.GATEWAY_TIMEOUT, http_client.NOT_FOUND))
+retryable_file_errors = set((pycurl.E_FILE_COULDNT_READ_FILE, errno.EAGAIN,
+ errno.ENOENT))
+
import pkg.client.api_errors as api_errors
-retryable_http_errors = set((httplib.REQUEST_TIMEOUT, httplib.BAD_GATEWAY,
- httplib.GATEWAY_TIMEOUT, httplib.NOT_FOUND))
-retryable_file_errors = set((pycurl.E_FILE_COULDNT_READ_FILE, errno.EAGAIN,
- errno.ENOENT))
-
# Errors that stats.py may include in a decay-able error rate
-decayable_http_errors = set((httplib.NOT_FOUND,))
+decayable_http_errors = set((http_client.NOT_FOUND,))
decayable_file_errors = set((pycurl.E_FILE_COULDNT_READ_FILE, errno.EAGAIN,
errno.ENOENT))
decayable_pycurl_errors = set((pycurl.E_OPERATION_TIMEOUTED,
@@ -58,8 +59,8 @@
}
proto_code_map = {
- "http": httplib.responses,
- "https": httplib.responses
+ "http": http_client.responses,
+ "https": http_client.responses
}
retryable_pycurl_errors = set((pycurl.E_COULDNT_CONNECT, pycurl.E_PARTIAL_FILE,
--- a/src/modules/client/transport/fileobj.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/transport/fileobj.py Wed Jul 01 16:20:01 2015 -0700
@@ -267,12 +267,14 @@
def __iter__(self):
return self
- def next(self):
+ def __next__(self):
line = self.readline()
if not line:
raise StopIteration
return line
+ next = __next__
+
# Private methods
def __fill_buffer(self, size=-1):
--- a/src/modules/client/transport/mdetect.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/transport/mdetect.py Wed Jul 01 16:20:01 2015 -0700
@@ -25,6 +25,7 @@
#
import random
+from six.moves import range
import pkg.misc as misc
import pkg.client.publisher as pub
@@ -58,7 +59,7 @@
five mirrors from the list of available mirrors."""
listlen = len(self._mirrors)
- iterlst = random.sample(xrange(listlen), min(listlen, 5))
+ iterlst = random.sample(range(listlen), min(listlen, 5))
for v in iterlst:
yield self._mirrors[v]
--- a/src/modules/client/transport/repo.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/transport/repo.py Wed Jul 01 16:20:01 2015 -0700
@@ -26,13 +26,19 @@
import cStringIO
import errno
-import httplib
import itertools
import os
+import shutil
import simplejson as json
+import six
import sys
-import urlparse
-import urllib
+import tempfile
+
+from email.utils import formatdate
+from six.moves import http_client
+from six.moves.urllib.parse import quote, urlencode, urlsplit, urlparse, \
+ urlunparse, urljoin
+from six.moves.urllib.request import url2pathname, pathname2url
import pkg
import pkg.p5i as p5i
@@ -44,10 +50,7 @@
import pkg.server.repository as svr_repo
import pkg.server.query_parser as sqp
-from email.utils import formatdate
from pkg.misc import N_
-import tempfile
-import shutil
class TransportRepo(object):
"""The TransportRepo class handles transport requests.
@@ -233,7 +236,7 @@
if not mapping:
# Request is basename of path portion of URI.
- e.request = os.path.basename(urlparse.urlsplit(
+ e.request = os.path.basename(urlsplit(
e.url)[2])
continue
@@ -289,7 +292,7 @@
for u in urllist:
if not mapping:
- utup = urlparse.urlsplit(u)
+ utup = urlsplit(u)
req = utup[2]
req = os.path.basename(req)
reqlist.append(req)
@@ -421,7 +424,7 @@
fobj.free_buffer = False
fobj.read()
except tx.TransportProtoError as e:
- if e.code == httplib.BAD_REQUEST:
+ if e.code == http_client.BAD_REQUEST:
exc_type, exc_value, exc_tb = sys.exc_info()
try:
e.details = self._parse_html_error(
@@ -429,7 +432,7 @@
except:
# If parse fails, raise original
# exception.
- raise exc_value, None, exc_tb
+ six.reraise(exc_value, None, exc_tb)
raise
finally:
fobj.close()
@@ -459,18 +462,18 @@
not base.endswith("/{0}/".format(pub_prefix)) and \
self.supports_version("publisher", [1]) > -1:
# Append the publisher prefix to the repository URL.
- base = urlparse.urljoin(base, pub_prefix) + "/"
-
- uri = urlparse.urljoin(base, methodstr)
+ base = urljoin(base, pub_prefix) + "/"
+
+ uri = urljoin(base, methodstr)
if not query:
return uri
# If a set of query data was provided, then decompose the URI
# into its component parts and replace the query portion with
# the encoded version of the new query data.
- components = list(urlparse.urlparse(uri))
- components[4] = urllib.urlencode(query)
- return urlparse.urlunparse(components)
+ components = list(urlparse(uri))
+ components[4] = urlencode(query)
+ return urlunparse(components)
def do_search(self, data, header=None, ccancel=None, pub=None):
"""Perform a remote search against origin repos."""
@@ -478,13 +481,13 @@
requesturl = self.__get_request_url("search/1/", pub=pub)
if len(data) > 1:
# Post and retrieve.
- request_data = urllib.urlencode(
+ request_data = urlencode(
[(i, str(q)) for i, q in enumerate(data)])
return self._post_url(requesturl, request_data,
header, ccancel=ccancel)
# Retrieval only.
- requesturl = urlparse.urljoin(requesturl, urllib.quote(
+ requesturl = urljoin(requesturl, quote(
str(data[0]), safe=''))
return self._fetch_url(requesturl, header, ccancel=ccancel)
@@ -541,7 +544,7 @@
progclass = CatalogProgress
for f in filelist:
- url = urlparse.urljoin(baseurl, f)
+ url = urljoin(baseurl, f)
urllist.append(url)
fn = os.path.join(destloc, f)
self._add_file_url(url, filepath=fn, header=headers,
@@ -588,7 +591,7 @@
baseurl = self.__get_request_url("file/{0}/".format(version),
pub=pub)
- requesturl = urlparse.urljoin(baseurl, fhash)
+ requesturl = urljoin(baseurl, fhash)
return self._fetch_url(requesturl, header, ccancel=ccancel)
def get_publisherinfo(self, header=None, ccancel=None):
@@ -623,7 +626,7 @@
mfst = fmri.get_url_path()
baseurl = self.__get_request_url("manifest/0/", pub=pub)
- requesturl = urlparse.urljoin(baseurl, mfst)
+ requesturl = urljoin(baseurl, mfst)
return self._fetch_url(requesturl, header, compress=True,
ccancel=ccancel)
@@ -643,7 +646,7 @@
for fmri, h in mfstlist:
f = fmri.get_url_path()
- url = urlparse.urljoin(baseurl, f)
+ url = urljoin(baseurl, f)
urlmapping[url] = fmri
fn = os.path.join(dest, f)
self._add_file_url(url, filepath=fn, header=h,
@@ -700,7 +703,7 @@
progclass = FileProgress
for f in filelist:
- url = urlparse.urljoin(baseurl, f)
+ url = urljoin(baseurl, f)
urllist.append(url)
fn = os.path.join(dest, f)
self._add_file_url(url, filepath=fn,
@@ -764,7 +767,7 @@
# use .read() since this will empty the data buffer.
fobj.getheader("octopus", None)
except tx.TransportProtoError as e:
- if e.code == httplib.UNAUTHORIZED:
+ if e.code == http_client.UNAUTHORIZED:
exc_type, exc_value, exc_tb = sys.exc_info()
try:
e.details = self._analyze_server_error(
@@ -773,7 +776,7 @@
except:
# If analysis fails, raise original
# exception.
- raise exc_value, None, exc_tb
+ six.reraise(exc_value, None, exc_tb)
raise
return fobj
@@ -800,7 +803,7 @@
baseurl = self.__get_request_url("add/0/")
request_str = "{0}/{1}".format(trans_id, action.name)
- requesturl = urlparse.urljoin(baseurl, request_str)
+ requesturl = urljoin(baseurl, request_str)
if action.data:
data_fobj = action.data()
@@ -829,7 +832,7 @@
attrs = {}
baseurl = self.__get_request_url("file/1/")
- requesturl = urlparse.urljoin(baseurl, trans_id)
+ requesturl = urljoin(baseurl, trans_id)
headers = dict(
("X-IPkg-SetAttr{0}".format(i), "{0}={1}".format(k, attrs[k]))
@@ -849,7 +852,7 @@
a (publish-state, fmri) tuple."""
baseurl = self.__get_request_url("abandon/0/")
- requesturl = urlparse.urljoin(baseurl, trans_id)
+ requesturl = urljoin(baseurl, trans_id)
fobj = self._fetch_url(requesturl, header=header,
failonerror=False)
@@ -859,7 +862,7 @@
state = fobj.getheader("State", None)
pkgfmri = fobj.getheader("Package-FMRI", None)
except tx.TransportProtoError as e:
- if e.code == httplib.BAD_REQUEST:
+ if e.code == http_client.BAD_REQUEST:
exc_type, exc_value, exc_tb = sys.exc_info()
try:
e.details = self._parse_html_error(
@@ -867,7 +870,7 @@
except:
# If parse fails, raise original
# exception.
- raise exc_value, None, exc_tb
+ six.reraise(exc_value, None, exc_tb)
raise
finally:
fobj.close()
@@ -888,7 +891,7 @@
headers.update(header)
baseurl = self.__get_request_url("close/0/")
- requesturl = urlparse.urljoin(baseurl, trans_id)
+ requesturl = urljoin(baseurl, trans_id)
fobj = self._fetch_url(requesturl, header=headers,
failonerror=False)
@@ -899,7 +902,7 @@
state = fobj.getheader("State", None)
pkgfmri = fobj.getheader("Package-FMRI", None)
except tx.TransportProtoError as e:
- if e.code == httplib.BAD_REQUEST:
+ if e.code == http_client.BAD_REQUEST:
exc_type, exc_value, exc_tb = sys.exc_info()
try:
e.details = self._parse_html_error(
@@ -907,7 +910,7 @@
except:
# If parse fails, raise original
# exception.
- raise exc_value, None, exc_tb
+ six.reraise(exc_value, None, exc_tb)
raise
finally:
@@ -928,8 +931,8 @@
def __start_trans(self, baseurl, header, client_release, pkg_name):
"""Start a publication transaction."""
- request_str = urllib.quote(pkg_name, "")
- requesturl = urlparse.urljoin(baseurl, request_str)
+ request_str = quote(pkg_name, "")
+ requesturl = urljoin(baseurl, request_str)
headers = {"Client-Release": client_release}
if header:
@@ -943,7 +946,7 @@
fobj.read()
trans_id = fobj.getheader("Transaction-ID", None)
except tx.TransportProtoError as e:
- if e.code == httplib.BAD_REQUEST:
+ if e.code == http_client.BAD_REQUEST:
exc_type, exc_value, exc_tb = sys.exc_info()
try:
e.details = self._parse_html_error(
@@ -951,7 +954,7 @@
except:
# If parse fails, raise original
# exception.
- raise exc_value, None, exc_tb
+ six.reraise(exc_value, None, exc_tb)
raise
finally:
fobj.close()
@@ -1050,7 +1053,7 @@
"""Invoke HTTP HEAD to send manifest intent data."""
baseurl = self.__get_request_url("manifest/0/", pub=pub)
- requesturl = urlparse.urljoin(baseurl, mfst)
+ requesturl = urljoin(baseurl, mfst)
resp = self._fetch_url_header(requesturl, header,
ccancel=ccancel)
@@ -1161,9 +1164,9 @@
try:
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(self._repouri.uri, "file",
+ urlparse(self._repouri.uri, "file",
allow_fragments=0)
- path = urllib.url2pathname(path)
+ path = url2pathname(path)
self._frepo = svr_repo.Repository(read_only=True,
root=path)
except cfg.ConfigError as e:
@@ -1254,7 +1257,7 @@
# to the user.
if len(res_list) == 1:
try:
- tmp = res_list[0].next()
+ tmp = next(res_list[0])
res_list = [itertools.chain([tmp], res_list[0])]
except StopIteration:
self.__stats.record_tx()
@@ -1271,7 +1274,7 @@
fmri_str, fv, line = vals
yield "{0} {1} {2} {3} {4}\n".format(
i, return_type, fmri_str,
- urllib.quote(fv),
+ quote(fv),
line.rstrip())
elif return_type == \
sqp.Query.RETURN_PACKAGES:
@@ -1302,8 +1305,8 @@
# create URL for requests
for f in filelist:
try:
- url = urlparse.urlunparse(("file", None,
- urllib.pathname2url(self._frepo.catalog_1(f,
+ url = urlunparse(("file", None,
+ pathname2url(self._frepo.catalog_1(f,
pub=pub_prefix)), None, None, None))
except svr_repo.RepositoryError as e:
ex = tx.TransportProtoError("file",
@@ -1353,8 +1356,8 @@
pub_prefix = getattr(pub, "prefix", None)
try:
- requesturl = urlparse.urlunparse(("file", None,
- urllib.pathname2url(self._frepo.file(fhash,
+ requesturl = urlunparse(("file", None,
+ pathname2url(self._frepo.file(fhash,
pub=pub_prefix)), None, None, None))
except svr_repo.RepositoryFileNotFoundError as e:
ex = tx.TransportProtoError("file", errno.ENOENT,
@@ -1409,8 +1412,8 @@
pub_prefix = getattr(pub, "prefix", None)
try:
- requesturl = urlparse.urlunparse(("file", None,
- urllib.pathname2url(self._frepo.manifest(fmri,
+ requesturl = urlunparse(("file", None,
+ pathname2url(self._frepo.manifest(fmri,
pub=pub_prefix)), None, None, None))
except svr_repo.RepositoryError as e:
ex = tx.TransportProtoError("file", errno.EPROTO,
@@ -1439,8 +1442,8 @@
pre_exec_errors = []
for fmri, h in mfstlist:
try:
- url = urlparse.urlunparse(("file", None,
- urllib.pathname2url(self._frepo.manifest(
+ url = urlunparse(("file", None,
+ pathname2url(self._frepo.manifest(
fmri, pub=pub_prefix)), None, None, None))
except svr_repo.RepositoryError as e:
ex = tx.TransportProtoError("file",
@@ -1509,8 +1512,8 @@
pre_exec_errors = []
for f in filelist:
try:
- url = urlparse.urlunparse(("file", None,
- urllib.pathname2url(self._frepo.file(f,
+ url = urlunparse(("file", None,
+ pathname2url(self._frepo.file(f,
pub=pub_prefix)), None, None, None))
except svr_repo.RepositoryFileNotFoundError as e:
ex = tx.TransportProtoError("file",
@@ -1599,7 +1602,7 @@
buf.write("pkg-server {0}\n".format(pkg.VERSION))
buf.write("\n".join(
"{0} {1}".format(op, " ".join(vers))
- for op, vers in vops.iteritems()
+ for op, vers in six.iteritems(vops)
) + "\n")
buf.seek(0)
self.__stats.record_tx()
@@ -1837,11 +1840,11 @@
try:
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(self._repouri.uri, "file",
+ urlparse(self._repouri.uri, "file",
allow_fragments=0)
# Path must be rstripped of separators to be used as
# a file.
- path = urllib.url2pathname(path.rstrip(os.path.sep))
+ path = url2pathname(path.rstrip(os.path.sep))
self._arc = pkg.p5p.Archive(path, mode="r")
except pkg.p5p.InvalidArchive as e:
ex = tx.TransportProtoError("file", errno.EINVAL,
@@ -2105,7 +2108,7 @@
buf.write("pkg-server {0}\n".format(pkg.VERSION))
buf.write("\n".join(
"{0} {1}".format(op, " ".join(vers))
- for op, vers in vops.iteritems()
+ for op, vers in six.iteritems(vops)
) + "\n")
buf.seek(0)
self.__stats.record_tx()
@@ -2155,7 +2158,7 @@
'repostats' is a RepoStats object.
'repouri' is a TransportRepoURI object.
-
+
'engine' is a transport engine object.
'frepo' is an optional Repository object to use instead
@@ -2166,9 +2169,9 @@
try:
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(repouri.uri, "file",
+ urlparse(repouri.uri, "file",
allow_fragments=0)
- path = urllib.url2pathname(path)
+ path = url2pathname(path)
except Exception as e:
ex = tx.TransportProtoError("file", errno.EPROTO,
reason=str(e), repourl=repostats.url)
--- a/src/modules/client/transport/stats.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/transport/stats.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,9 +29,10 @@
import os
import datetime
import random
-import urlparse
+from six.moves.urllib.parse import urlsplit
import pkg.misc as misc
+
class RepoChooser(object):
"""An object that contains repo statistics. It applies algorithms
to choose an optimal set of repos for a given publisher, based
@@ -202,7 +203,7 @@
repository URI."""
self.__url = repouri.uri.rstrip("/")
- self.__scheme = urlparse.urlsplit(self.__url)[0]
+ self.__scheme = urlsplit(self.__url)[0]
self.__priority = repouri.priority
self.__proxy = repouri.proxy
--- a/src/modules/client/transport/transport.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/client/transport/transport.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,12 +27,13 @@
import cStringIO
import copy
import errno
-import httplib
import os
import simplejson as json
+import six
import statvfs
import tempfile
import zlib
+from six.moves import http_client, range
import pkg.catalog as catalog
import pkg.client.api_errors as apx
@@ -163,7 +164,7 @@
if isinstance(pub, publisher.Publisher):
pub = pub.prefix
- elif not pub or not isinstance(pub, basestring):
+ elif not pub or not isinstance(pub, six.string_types):
pub = None
caches = [
@@ -677,12 +678,12 @@
failures.extend(ex.failures)
except tx.TransportProtoError as e:
- if e.code in (httplib.NOT_FOUND, errno.ENOENT):
+ if e.code in (http_client.NOT_FOUND, errno.ENOENT):
raise apx.UnsupportedSearchError(e.url,
"search/1")
- elif e.code == httplib.NO_CONTENT:
+ elif e.code == http_client.NO_CONTENT:
no_result_url = e.url
- elif e.code in (httplib.BAD_REQUEST,
+ elif e.code in (http_client.BAD_REQUEST,
errno.EINVAL):
raise apx.MalformedSearchRequest(e.url)
elif e.retryable:
@@ -774,7 +775,7 @@
# failures that it contains
failures.extend(ex.failures)
except tx.TransportProtoError as e:
- if e.code == httplib.NOT_MODIFIED:
+ if e.code == http_client.NOT_MODIFIED:
return
elif e.retryable:
failures.append(e)
@@ -979,7 +980,7 @@
try:
self._verify_catalog(s, download_dir)
except tx.InvalidContentException as e:
- repostats.record_error(content=True)
+ repostats.record_error(content=True)
failedreqs.append(e.request)
failures.append(e)
if not flist:
@@ -1218,7 +1219,7 @@
failures.extend(e.failures)
except (TypeError, ValueError) as e:
-
+
exc = tx.TransferContentException(
repouri_key[0],
"Invalid stats response: {0}".format(e),
@@ -1586,7 +1587,7 @@
continue
try:
- mf = file(dl_path)
+ mf = open(dl_path)
mcontent = mf.read()
mf.close()
manifest.FactoredManifest(fmri,
@@ -1676,7 +1677,7 @@
return False
if mfstpath:
- mf = file(mfstpath)
+ mf = open(mfstpath)
mcontent = mf.read()
mf.close()
elif content is not None:
@@ -1842,7 +1843,7 @@
dl_path)
except tx.InvalidContentException as e:
mfile.subtract_progress(e.size)
- e.request = s
+ e.request = s
repostats.record_error(content=True)
failedreqs.append(s)
failures.append(e)
@@ -2142,7 +2143,7 @@
fail = None
iteration = 0
- for i in xrange(count):
+ for i in range(count):
iteration += 1
rslist = self.stats.get_repostats(repolist, origins)
if prefer_remote:
@@ -3027,9 +3028,15 @@
def __len__(self):
return len(self._hash)
- def __nonzero__(self):
+ # To define the "boolness" of a class, Python 2 uses the special
+ # method __nonzero__() while Python 3 uses __bool__(). For Python
+ # 2 and 3 compatibility, define __bool__() only, and alias
+ # __nonzero__ = __bool__
+ def __bool__(self):
return bool(self._hash)
+ __nonzero__ = __bool__
+
def add_hash(self, hashval, item):
"""Add 'item' to list of values that exist for
hash value 'hashval'."""
@@ -3068,7 +3075,7 @@
def keys(self):
"""Return a list of the keys in the hash."""
- return self._hash.keys()
+ return list(self._hash.keys())
class MultiFile(MultiXfr):
@@ -3315,7 +3322,7 @@
e.filename)
raise
- src = file(current_path, "rb")
+ src = open(current_path, "rb")
outfile = os.fdopen(fd, "wb")
if self._decompress:
misc.gunzip_from_stream(src, outfile, ignore_hash=True)
@@ -3343,7 +3350,7 @@
# need to configure a transport and or publishers.
def setup_publisher(repo_uri, prefix, xport, xport_cfg,
- remote_prefix=False, remote_publishers=False, ssl_key=None,
+ remote_prefix=False, remote_publishers=False, ssl_key=None,
ssl_cert=None):
"""Given transport 'xport' and publisher configuration 'xport_cfg'
take the string that identifies a repository by uri in 'repo_uri'
--- a/src/modules/config.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/config.py Wed Jul 01 16:20:01 2015 -0700
@@ -46,7 +46,6 @@
manipulation of configuration data is needed.
"""
-import ConfigParser
import ast
import codecs
import commands
@@ -55,9 +54,12 @@
import os
import re
import shlex
+import six
import stat
import tempfile
import uuid
+from six import python_2_unicode_compatible
+from six.moves import configparser
from pkg import misc, portable
import pkg.version
@@ -209,6 +211,7 @@
self.section)
+@python_2_unicode_compatible
class Property(object):
"""Base class for properties."""
@@ -219,7 +222,7 @@
_value_map = misc.EmptyDict
def __init__(self, name, default="", value_map=misc.EmptyDict):
- if not isinstance(name, basestring) or \
+ if not isinstance(name, six.string_types) or \
not self.__name_re.match(name):
raise InvalidPropertyNameError(prop=name)
try:
@@ -256,22 +259,17 @@
return self.__class__(self.name, default=self.value,
value_map=self._value_map)
- def __unicode__(self):
- if isinstance(self.value, unicode):
+ def __str__(self):
+ if isinstance(self.value, six.text_type):
return self.value
# Assume that value can be represented in utf-8.
- return unicode(self.__str__(), "utf-8")
-
- def __str__(self):
- if isinstance(self.value, unicode):
- return self.value.encode("utf-8")
- return str(self.value)
+ return six.text_type(str(self.value), "utf-8")
def _is_allowed(self, value):
"""Raises an InvalidPropertyValueError if 'value' is not allowed
for this property.
"""
- if not isinstance(value, basestring):
+ if not isinstance(value, six.string_types):
# Only string values are allowed.
raise InvalidPropertyValueError(prop=self.name,
value=value)
@@ -305,7 +303,7 @@
@value.setter
def value(self, value):
"""Sets the property's value."""
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self._value_map.get(value, value)
if value is None:
value = ""
@@ -327,7 +325,7 @@
def __init__(self, name_pattern, allowed=None, default=None,
prop_type=Property, value_map=None):
assert prop_type
- if not isinstance(name_pattern, basestring) or not name_pattern:
+ if not isinstance(name_pattern, six.string_types) or not name_pattern:
raise InvalidPropertyTemplateNameError(
prop=name_pattern)
self.__name = name_pattern
@@ -386,12 +384,12 @@
@Property.value.setter
def value(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self._value_map.get(value, value)
if value is None or value == "":
self._value = False
return
- elif isinstance(value, basestring):
+ elif isinstance(value, six.string_types):
if value.lower() == "true":
self._value = True
return
@@ -434,7 +432,7 @@
@Property.value.setter
def value(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self._value_map.get(value, value)
if value is None or value == "":
value = 0
@@ -459,13 +457,13 @@
@Property.value.setter
def value(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self._value_map.get(value, value)
if value is None or value == "":
self._value = ""
return
- if not isinstance(value, basestring) or \
+ if not isinstance(value, six.string_types) or \
not misc.valid_pub_prefix(value):
# Only string values are allowed.
raise InvalidPropertyValueError(prop=self.name,
@@ -547,11 +545,11 @@
@PropDefined.value.setter
def value(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self._value_map.get(value, value)
if value is None or value == "":
value = []
- elif isinstance(value, basestring):
+ elif isinstance(value, six.string_types):
value = self._parse_str(value)
if not isinstance(value, list):
# Only accept lists for literal string form.
@@ -570,7 +568,7 @@
v = ""
elif isinstance(v, (bool, int)):
v = str(v)
- elif not isinstance(v, basestring):
+ elif not isinstance(v, six.string_types):
# Only string values are allowed.
raise InvalidPropertyValueError(prop=self.name,
value=value)
@@ -592,11 +590,11 @@
@PropDefined.value.setter
def value(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self._value_map.get(value, value)
if value is None or value == "":
value = []
- elif isinstance(value, basestring):
+ elif isinstance(value, six.string_types):
value = self._parse_str(value)
if not isinstance(value, list):
# Only accept lists for literal string form.
@@ -660,7 +658,7 @@
continue
Property._is_allowed(self, val)
-
+@python_2_unicode_compatible
class PropSimpleList(PropList):
"""Class representing a property with a list of string values that are
simple in nature. Output is in a comma-separated format that may not
@@ -698,7 +696,7 @@
try:
v = v.encode("ascii")
except ValueError:
- if not isinstance(v, unicode):
+ if not isinstance(v, six.text_type):
try:
v = v.decode("utf-8")
except ValueError:
@@ -710,18 +708,13 @@
result.append(v)
return result
- def __unicode__(self):
+ def __str__(self):
if self.value and len(self.value):
# Performing the join using a unicode string results in
# a single unicode string object.
return u",".join(self.value)
return u""
- def __str__(self):
- if self.value and len(self.value):
- return ",".join([v.encode("utf-8") for v in self.value])
- return ""
-
class PropPubURI(Property):
"""Class representing publisher URI properties."""
@@ -843,7 +836,7 @@
@Property.value.setter
def value(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self._value_map.get(value, value)
if value is None or value == "":
value = "0"
@@ -860,6 +853,7 @@
self._value = nvalue
+@python_2_unicode_compatible
class PropertySection(object):
"""A class representing a section of the configuration that also
provides an interface for adding and managing properties and sections
@@ -872,7 +866,7 @@
__name_re = re.compile(r"\A[^\t\n\r\f\v\\/]+\Z")
def __init__(self, name, properties=misc.EmptyI):
- if not isinstance(name, basestring) or \
+ if not isinstance(name, six.string_types) or \
not self.__name_re.match(name) or \
name == "CONFIGURATION":
raise InvalidSectionNameError(name)
@@ -897,11 +891,8 @@
propsec.add_property(copy.copy(p))
return propsec
- def __unicode__(self):
- return unicode(self.name)
-
def __str__(self):
- return self.name
+ return six.text_type(self.name)
def add_property(self, prop):
"""Adds the specified property object to the section. The
@@ -915,7 +906,7 @@
name."""
return dict(
(pname, p.value)
- for pname, p in self.__properties.iteritems()
+ for pname, p in six.iteritems(self.__properties)
)
def get_property(self, name):
@@ -930,7 +921,7 @@
def get_properties(self):
"""Returns a generator that yields the list of property objects.
"""
- return self.__properties.itervalues()
+ return six.itervalues(self.__properties)
def remove_property(self, name):
"""Removes any matching property object from the section."""
@@ -954,7 +945,7 @@
"""
def __init__(self, name_pattern, properties=misc.EmptyI):
- if not isinstance(name_pattern, basestring) or not name_pattern:
+ if not isinstance(name_pattern, six.string_types) or not name_pattern:
raise InvalidSectionTemplateNameError(
section=name_pattern)
self.__name = name_pattern
@@ -997,6 +988,7 @@
return self.__name
+@python_2_unicode_compatible
class Config(object):
"""The Config class provides basic in-memory management of configuration
data."""
@@ -1034,35 +1026,23 @@
self._version = version
self.reset(overrides=overrides)
- def __unicode__(self):
+ def __str__(self):
"""Returns a unicode object representation of the configuration
object.
"""
out = u""
for sec, props in self.get_properties():
- out += "[{0}]\n".format(sec.name)
+ out += u"[{0}]\n".format(sec.name)
for p in props:
- out += u"{0} = {1}\n".format(p.name, unicode(p))
- out += "\n"
- return out
-
- def __str__(self):
- """Returns a string representation of the configuration
- object.
- """
- out = ""
- for sec, props in self.get_properties():
- out += "[{0}]\n".format(sec.name)
- for p in props:
- out += "{0} = {1}\n".format(p.name, str(p))
- out += "\n"
+ out += u"{0} = {1}\n".format(p.name, six.text_type(p))
+ out += u"\n"
return out
def _get_matching_property(self, section, name, default_type=Property):
"""Returns the Property object matching the given name for
the given PropertySection object, or adds a new one (if it
does not already exist) based on class definitions.
-
+
'default_type' is an optional parameter specifying the type of
property to create if a class definition does not exist for the
given property.
@@ -1179,8 +1159,8 @@
map(secobj.remove_property, elide)
self.add_section(secobj)
- for sname, props in overrides.iteritems():
- for pname, val in props.iteritems():
+ for sname, props in six.iteritems(overrides):
+ for pname, val in six.iteritems(props):
self.set_property(sname, pname, val)
def add_property_value(self, section, name, value):
@@ -1273,7 +1253,7 @@
def get_sections(self):
"""Returns a generator that yields the list of property section
objects."""
- return self.__sections.itervalues()
+ return six.itervalues(self.__sections)
def remove_property(self, section, name):
"""Remove the property object matching the given section and
@@ -1389,8 +1369,8 @@
}
"""
- for section, props in properties.iteritems():
- for pname, pval in props.iteritems():
+ for section, props in six.iteritems(properties):
+ for pname, pval in six.iteritems(props):
self.set_property(section, pname, pval)
@property
@@ -1473,7 +1453,7 @@
"""
# First, attempt to read the target.
- cp = ConfigParser.RawConfigParser()
+ cp = configparser.RawConfigParser()
# Disabled ConfigParser's inane option transformation to ensure
# option case is preserved.
cp.optionxform = lambda x: x
@@ -1501,8 +1481,8 @@
try:
version = cp.getint("CONFIGURATION", "version")
self._version = version
- except (ConfigParser.NoSectionError,
- ConfigParser.NoOptionError, ValueError):
+ except (configparser.NoSectionError,
+ configparser.NoOptionError, ValueError):
# Assume current version.
pass
@@ -1572,7 +1552,7 @@
if os.path.exists(self._target) and not self._dirty:
return
- cp = ConfigParser.RawConfigParser()
+ cp = configparser.RawConfigParser()
# Disabled ConfigParser's inane option transformation to ensure
# option case is preserved.
cp.optionxform = lambda x: x
@@ -1807,11 +1787,11 @@
# shlex.split() automatically does escaping for a list of values
# so no need to do it here.
- for section, props in cfgdata.iteritems():
+ for section, props in six.iteritems(cfgdata):
if section == "CONFIGURATION":
# Reserved for configuration file management.
continue
- for prop, value in props.iteritems():
+ for prop, value in six.iteritems(props):
if section in overrides and \
prop in overrides[section]:
continue
--- a/src/modules/cpiofile.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/cpiofile.py Wed Jul 01 16:20:01 2015 -0700
@@ -44,6 +44,7 @@
import stat
import time
import struct
+from six.moves import range
import pkg.pkgsubprocess as subprocess
# cpio magic numbers
@@ -250,7 +251,7 @@
"""
if pos - self.pos >= 0:
blocks, remainder = divmod(pos - self.pos, self.bufsize)
- for i in xrange(blocks):
+ for i in range(blocks):
self.read(self.bufsize)
self.read(remainder)
else:
@@ -547,7 +548,7 @@
self.mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
if not fileobj and not cfobj:
- fileobj = file(self.name, self.mode)
+ fileobj = open(self.name, self.mode)
self._extfileobj = False
else:
# Copy constructor: just copy fileobj over and reset the
@@ -572,7 +573,7 @@
if self._mode == "r":
self.firstmember = None
- self.firstmember = self.next()
+ self.firstmember = next(self)
if self._mode == "a":
# Move to the end of the archive,
@@ -580,7 +581,7 @@
self.firstmember = None
while True:
try:
- cpioinfo = self.next()
+ cpioinfo = next(self)
except ReadError:
self.fileobj.seek(0)
break
@@ -696,7 +697,7 @@
cpioname = pre + ext
if fileobj is None:
- fileobj = file(name, mode + "b")
+ fileobj = open(name, mode + "b")
if mode != "r":
name = tarname
@@ -809,7 +810,7 @@
# scan the whole archive.
return self.members
- def next(self):
+ def __next__(self):
self._check("ra")
if self.firstmember is not None:
m = self.firstmember
@@ -850,6 +851,8 @@
self.members.append(cpioinfo)
return cpioinfo
+ next = __next__
+
def extractfile(self, member):
self._check("r")
@@ -878,13 +881,13 @@
else:
end = members.index(cpioinfo)
- for i in xrange(end - 1, -1, -1):
+ for i in range(end - 1, -1, -1):
if name == members[i].name:
return members[i]
def _load(self):
while True:
- cpioinfo = self.next()
+ cpioinfo = next(self)
if cpioinfo is None:
break
self._loaded = True
@@ -938,9 +941,9 @@
def __iter__(self):
return self
- def next(self):
+ def __next__(self):
if not self.cpiofile._loaded:
- cpioinfo = self.cpiofile.next()
+ cpioinfo = next(self.cpiofile)
if not cpioinfo:
self.cpiofile._loaded = True
raise StopIteration
@@ -952,6 +955,8 @@
self.index += 1
return cpioinfo
+ next = __next__
+
def is_cpiofile(name):
magic = open(name).read(CMS_LEN)
--- a/src/modules/depotcontroller.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/depotcontroller.py Wed Jul 01 16:20:01 2015 -0700
@@ -23,17 +23,20 @@
#
from __future__ import print_function
-import httplib
import os
-import pkg.pkgsubprocess as subprocess
-import pkg.server.repository as sr
+import signal
+import six
import ssl
import sys
-import signal
import time
-import urllib
-import urllib2
-import urlparse
+
+from six.moves import http_client, range
+from six.moves.urllib.error import HTTPError, URLError
+from six.moves.urllib.request import pathname2url, urlopen
+from six.moves.urllib.parse import urlunparse, urljoin
+
+import pkg.pkgsubprocess as subprocess
+import pkg.server.repository as sr
class DepotStateException(Exception):
@@ -160,7 +163,7 @@
root=self.__dir, writable_root=self.__writable_root)
def get_repo_url(self):
- return urlparse.urlunparse(("file", "", urllib.pathname2url(
+ return urlunparse(("file", "", pathname2url(
self.__dir), "", "", ""))
def set_readonly(self):
@@ -253,9 +256,9 @@
running depots."""
self.__nasty = nastiness
if self.__depot_handle != None:
- nastyurl = urlparse.urljoin(self.get_depot_url(),
+ nastyurl = urljoin(self.get_depot_url(),
"nasty/{0:d}".format(self.__nasty))
- url = urllib2.urlopen(nastyurl)
+ url = urlopen(nastyurl)
url.close()
def get_nasty(self):
@@ -279,21 +282,21 @@
def __network_ping(self):
try:
- repourl = urlparse.urljoin(self.get_depot_url(),
+ repourl = urljoin(self.get_depot_url(),
"versions/0")
# Disable SSL peer verification, we just want to check
# if the depot is running.
- url = urllib2.urlopen(repourl,
+ url = urlopen(repourl,
context=ssl._create_unverified_context())
url.close()
- except urllib2.HTTPError as e:
+ except HTTPError as e:
# Server returns NOT_MODIFIED if catalog is up
# to date
- if e.code == httplib.NOT_MODIFIED:
+ if e.code == http_client.NOT_MODIFIED:
return True
else:
return False
- except urllib2.URLError:
+ except URLError:
return False
return True
@@ -374,7 +377,7 @@
if self.__nasty_sleep:
args.append("--nasty-sleep {0:d}".format(self.__nasty_sleep))
for section in self.__props:
- for prop, val in self.__props[section].iteritems():
+ for prop, val in six.iteritems(self.__props[section]):
args.append("--set-property={0}.{1}='{2}'".format(
section, prop, val))
if self.__writable_root:
--- a/src/modules/facet.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/facet.py Wed Jul 01 16:20:01 2015 -0700
@@ -26,11 +26,13 @@
# basic facet support
+import fnmatch
+import re
+import six
+import types
+
from pkg._varcet import _allow_facet
from pkg.misc import EmptyI, ImmutableDict
-import fnmatch
-import re
-import types
class Facets(dict):
# store information on facets; subclass dict
@@ -108,10 +110,10 @@
return [
[k, v, True]
- for k, v in obj.__inherited.iteritems()
+ for k, v in six.iteritems(obj.__inherited)
] + [
[k, v, False]
- for k, v in obj.__local.iteritems()
+ for k, v in six.iteritems(obj.__local)
]
@staticmethod
@@ -431,9 +433,9 @@
def update(self, d):
if type(d) == Facets:
# preserve inherited facets.
- for k, v in d.__inherited.iteritems():
+ for k, v in six.iteritems(d.__inherited):
self._set_inherited(k, v)
- for k, v in d.__local.iteritems():
+ for k, v in six.iteritems(d.__local):
self[k] = v
return
@@ -470,7 +472,7 @@
return rv
def items(self):
- return [a for a in self.iteritems()]
+ return [a for a in six.iteritems(self)]
def iteritems(self): # return in sorted order for display
for k in self.__keylist:
--- a/src/modules/flavor/base.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/flavor/base.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,12 +32,7 @@
from pkg.portable import PD_DEFAULT_RUNPATH
class DependencyAnalysisError(Exception):
-
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
+ pass
class MissingFile(DependencyAnalysisError):
--- a/src/modules/flavor/depthlimitedmf.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/flavor/depthlimitedmf.py Wed Jul 01 16:20:01 2015 -0700
@@ -92,13 +92,6 @@
if __name__ == "__main__":
class MultipleDefaultRunPaths(Exception):
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this
- # provides a a standard wrapper for this class'
- # exceptions so that they have a chance of being
- # stringified correctly.
- return str(self)
-
def __str__(self):
return _(
"More than one $PKGDEPEND_RUNPATH token was set on "
--- a/src/modules/flavor/smf_manifest.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/flavor/smf_manifest.py Wed Jul 01 16:20:01 2015 -0700
@@ -25,6 +25,7 @@
#
import os.path
+import six
import xml.dom.minidom as minidom
import xml.parsers
import xml.parsers.expat
@@ -51,7 +52,7 @@
"""
self.manifest = path
full_paths = None
- if isinstance(path, basestring):
+ if isinstance(path, six.string_types):
base_names = [os.path.basename(path)]
paths = [os.path.dirname(path)]
@@ -328,7 +329,7 @@
deps.append(SMFManifestDependency(action, manifest, pkg_vars,
action.attrs[PD_PROTO_DIR]))
pkg_attrs = {
- "org.opensolaris.smf.fmri": instance_mf.keys()
+ "org.opensolaris.smf.fmri": list(instance_mf.keys())
}
return deps, elist, pkg_attrs
--- a/src/modules/fmri.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/fmri.py Wed Jul 01 16:20:01 2015 -0700
@@ -26,7 +26,7 @@
import fnmatch
import re
-import urllib
+from six.moves.urllib.parse import quote
from pkg.version import Version, VersionError
@@ -53,12 +53,6 @@
Exception.__init__(self)
self.fmri = fmri
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
class IllegalFmri(FmriError):
@@ -444,25 +438,25 @@
FMRI."""
if stemonly:
- return "{0}".format(urllib.quote(self.pkg_name, ""))
+ return "{0}".format(quote(self.pkg_name, ""))
if self.version is None:
raise MissingVersionError(self)
- return "{0}@{1}".format(urllib.quote(self.pkg_name, ""),
- urllib.quote(str(self.version), ""))
+ return "{0}@{1}".format(quote(self.pkg_name, ""),
+ quote(str(self.version), ""))
def get_dir_path(self, stemonly = False):
"""Return the escaped directory path fragment for this FMRI."""
if stemonly:
- return "{0}".format(urllib.quote(self.pkg_name, ""))
+ return "{0}".format(quote(self.pkg_name, ""))
if self.version is None:
raise MissingVersionError(self)
- return "{0}/{1}".format(urllib.quote(self.pkg_name, ""),
- urllib.quote(self.version.__str__(), ""))
+ return "{0}/{1}".format(quote(self.pkg_name, ""),
+ quote(self.version.__str__(), ""))
def get_url_path(self):
"""Return the escaped URL path fragment for this FMRI.
@@ -471,8 +465,8 @@
if self.version is None:
raise MissingVersionError(self)
- return "{0}@{1}".format(urllib.quote(self.pkg_name, ""),
- urllib.quote(self.version.__str__(), ""))
+ return "{0}@{1}".format(quote(self.pkg_name, ""),
+ quote(self.version.__str__(), ""))
def is_same_pkg(self, other):
"""Return true if these packages are the same (although
--- a/src/modules/indexer.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/indexer.py Wed Jul 01 16:20:01 2015 -0700
@@ -28,7 +28,7 @@
import os
import platform
import shutil
-import urllib
+from six.moves.urllib.parse import unquote
import pkg.fmri as fmri
import pkg.lockfile as lockfile
@@ -181,7 +181,7 @@
def _build_version(vers):
""" Private method for building versions from a string. """
- return pkg.version.Version(urllib.unquote(vers), None)
+ return pkg.version.Version(unquote(vers), None)
def _read_input_indexes(self, directory):
""" Opens all index files using consistent_open and reads all
@@ -226,7 +226,7 @@
self._sort_file_bytes = 0
tmp_file_name = os.path.join(self._tmp_dir,
SORT_FILE_PREFIX + str(self._sort_file_num - 1))
- tmp_fh = file(tmp_file_name, "rb", buffering=PKG_FILE_BUFSIZ)
+ tmp_fh = open(tmp_file_name, "rb", buffering=PKG_FILE_BUFSIZ)
l = [
(ss.IndexStoreMainDict.parse_main_dict_line_for_token(line),
line)
@@ -234,7 +234,7 @@
]
tmp_fh.close()
l.sort()
- tmp_fh = file(tmp_file_name, "wb", buffering=PKG_FILE_BUFSIZ)
+ tmp_fh = open(tmp_file_name, "wb", buffering=PKG_FILE_BUFSIZ)
tmp_fh.writelines((line for tok, line in l))
tmp_fh.close()
@@ -302,7 +302,7 @@
#
# First pass determines whether a fast update makes sense and
# updates the list of fmris that will be in the index.
- #
+ #
filters, pkgplan_list = filters_pkgplan_list
for p in pkgplan_list:
d_fmri, o_fmri = p
@@ -413,13 +413,13 @@
self._progtrack.job_add_progress(
self._progtrack.JOB_REBUILD_SEARCH, nitems=0)
if at not in self.at_fh:
- self.at_fh[at] = file(os.path.join(out_dir,
+ self.at_fh[at] = open(os.path.join(out_dir,
"__at_" + at), "wb")
self.at_fh[at].write(cur_location + "\n")
for st, fv_list in st_list:
if st not in self.st_fh:
self.st_fh[st] = \
- file(os.path.join(out_dir,
+ open(os.path.join(out_dir,
"__st_" + st), "wb")
self.st_fh[st].write(cur_location + "\n")
for fv, p_list in fv_list:
@@ -467,7 +467,7 @@
try:
return \
ss.IndexStoreMainDict.parse_main_dict_line(
- fh.next())
+ next(fh))
except StopIteration:
return None
@@ -526,7 +526,7 @@
self.__splice(res,
new_info)
new_tok, new_info = \
- ss.IndexStoreMainDict.parse_main_dict_line(fh_dict[i].next())
+ ss.IndexStoreMainDict.parse_main_dict_line(next(fh_dict[i]))
cur_toks[i] = new_tok, new_info
except StopIteration:
# When a StopIteration happens, the
@@ -586,7 +586,7 @@
new_toks_available = True
new_toks_it = self._gen_new_toks_from_files()
try:
- tmp = new_toks_it.next()
+ tmp = next(new_toks_it)
next_new_tok, new_tok_info = tmp
except StopIteration:
new_toks_available = False
@@ -629,7 +629,7 @@
new_tok_info, out_dir)
try:
next_new_tok, new_tok_info = \
- new_toks_it.next()
+ next(new_toks_it)
except StopIteration:
new_toks_available = False
del next_new_tok
@@ -643,7 +643,7 @@
new_tok_info)
try:
next_new_tok, new_tok_info = \
- new_toks_it.next()
+ next(new_toks_it)
except StopIteration:
new_toks_available = False
del next_new_tok
@@ -666,7 +666,7 @@
new_tok_info, out_dir)
try:
next_new_tok, new_tok_info = \
- new_toks_it.next()
+ next(new_toks_it)
except StopIteration:
new_toks_available = False
finally:
@@ -767,7 +767,7 @@
elif input_type == IDX_INPUT_TYPE_FMRI:
assert not self._sort_fh
- self._sort_fh = file(os.path.join(self._tmp_dir,
+ self._sort_fh = open(os.path.join(self._tmp_dir,
SORT_FILE_PREFIX +
str(self._sort_file_num)), "wb")
self._sort_file_num += 1
--- a/src/modules/lint/base.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/lint/base.py Wed Jul 01 16:20:01 2015 -0700
@@ -26,24 +26,21 @@
import inspect
import os.path
-import ConfigParser
+import six
+import traceback
+from six.moves import configparser
import pkg.variant as variant
-import traceback
+
class LintException(Exception):
"""An exception thrown when something fatal has gone wrong during
the linting."""
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
+ pass
class DuplicateLintedAttrException(Exception):
"""An exception thrown when we've found duplicate pkg.linted* keys."""
- def __unicode__(self):
- return str(self)
+ pass
class Checker(object):
"""A base class for all lint checks. pkg.lint.engine discovers classes
@@ -84,7 +81,10 @@
'pkglint_id' keyword argument default and returns it."""
# the short name for this checker class, Checker.name
- name = method.im_class.name
+ if six.PY3:
+ name = method.__self__.__class__.name
+ else:
+ name = method.im_class.name
arg_spec = inspect.getargspec(method)
@@ -247,7 +247,7 @@
if os.path.exists(self.classification_path):
try:
self.classification_data = \
- ConfigParser.SafeConfigParser()
+ configparser.SafeConfigParser()
self.classification_data.readfp(
open(self.classification_path))
except Exception as err:
@@ -346,7 +346,7 @@
for key in action.attrs.keys():
if key.startswith("pkg.linted") and linted.startswith(key):
val = action.attrs.get(key, "false")
- if isinstance(val, basestring):
+ if isinstance(val, six.string_types):
if val.lower() == "true":
return True
else:
@@ -362,7 +362,7 @@
for key in manifest.attributes.keys():
if key.startswith("pkg.linted") and linted.startswith(key):
val = manifest.attributes.get(key, "false")
- if isinstance(val, basestring):
+ if isinstance(val, six.string_types):
if val.lower() == "true":
return True
else:
--- a/src/modules/lint/config.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/lint/config.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,9 +27,10 @@
# aspects of pkglint configuration
-import ConfigParser
import os
+from six.moves import configparser
+
defaults = {
"log_level": "INFO",
"do_pub_checks": "True",
@@ -48,11 +49,7 @@
"""An exception thrown when something fatal happens while reading the
config.
"""
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
+ pass
class PkglintConfig(object):
def __init__(self, config_file=None):
@@ -62,13 +59,13 @@
# ConfigParser doesn't do a good job of
# error reporting, so we'll just try to open
# the file
- file(config_file, "r").close()
+ open(config_file, "r").close()
except (EnvironmentError) as err:
raise PkglintConfigException(
_("unable to read config file: {0} ").format(
err))
try:
- self.config = ConfigParser.SafeConfigParser(defaults)
+ self.config = configparser.SafeConfigParser(defaults)
if not config_file:
self.config.readfp(
open("/usr/share/lib/pkg/pkglintrc"))
@@ -79,7 +76,7 @@
# sanity check our config by looking for a known key
self.config.get("pkglint", "log_level")
- except ConfigParser.Error as err:
+ except configparser.Error as err:
raise PkglintConfigException(
_("missing or corrupt pkglintrc file "
"{config_file}: {err}").format(**locals()))
--- a/src/modules/lint/engine.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/lint/engine.py Wed Jul 01 16:20:01 2015 -0700
@@ -34,12 +34,13 @@
from pkg.client.api_errors import ApiException
from pkg.version import DotSequence, Version
-import ConfigParser
import logging
import os
import shutil
+import six
import sys
-import urllib2
+from six.moves import configparser
+from six.moves.urllib.parse import urlparse, quote
PKG_CLIENT_NAME = "pkglint"
CLIENT_API_VERSION = 82
@@ -48,21 +49,13 @@
class LintEngineException(Exception):
"""An exception thrown when something fatal goes wrong with the engine,
such that linting can no longer continue."""
+ pass
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
class LintEngineSetupException(LintEngineException):
"""An exception thrown when the engine failed to complete its setup."""
+ pass
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
class LintEngineCache():
"""This class provides two caches for the LintEngine. A cache of the
@@ -420,13 +413,13 @@
excl = ""
else:
excl = excl.split()
- except ConfigParser.NoOptionError:
+ except configparser.NoOptionError:
pass
try:
self.version_pattern = conf.get("pkglint",
"version.pattern")
- except ConfigParser.NoOptionError:
+ except configparser.NoOptionError:
pass
for key, value in conf.items("pkglint"):
@@ -464,19 +457,19 @@
try:
self.do_pub_checks = conf.getboolean("pkglint",
"do_pub_checks")
- except ConfigParser.NoOptionError:
+ except configparser.NoOptionError:
pass
try:
self.use_tracker = conf.get("pkglint",
"use_progress_tracker").lower() == "true"
- except ConfigParser.NoOptionError:
+ except configparser.NoOptionError:
pass
try:
self.ignore_pubs = conf.get("pkglint",
"ignore_different_publishers").lower() == "true"
- except ConfigParser.NoOptionError:
+ except configparser.NoOptionError:
pass
return conf
@@ -876,9 +869,9 @@
# file:// URI, and get the absolute path. Missing or invalid
# repositories will be caught by pkg.client.api.image_create.
for i, uri in enumerate(repo_uris):
- if not urllib2.urlparse.urlparse(uri).scheme:
+ if not urlparse(uri).scheme:
repo_uris[i] = "file://{0}".format(
- urllib2.quote(os.path.abspath(uri)))
+ quote(os.path.abspath(uri)))
try:
api_inst = pkg.client.api.image_create(
@@ -1185,7 +1178,7 @@
if manifest and param_key in manifest:
val = manifest[param_key]
if val:
- if isinstance(val, basestring):
+ if isinstance(val, six.string_types):
return val
else:
return " ".join(val)
@@ -1193,7 +1186,7 @@
val = self.conf.get("pkglint", key)
if val:
return val.replace("\n", " ")
- except ConfigParser.NoOptionError:
+ except configparser.NoOptionError:
return None
def get_attr_action(self, attr, manifest):
--- a/src/modules/lint/log.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/lint/log.py Wed Jul 01 16:20:01 2015 -0700
@@ -25,8 +25,9 @@
#
import logging
+import os
+import six
import sys
-import os
from pkg.lint.base import DuplicateLintedAttrException, linted
@@ -57,9 +58,6 @@
self.producer = producer
self.msgid = msgid
- def __unicode__(self):
- return str(self.msg)
-
def __str__(self):
return str(self.msg)
--- a/src/modules/lint/pkglint_action.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/lint/pkglint_action.py Wed Jul 01 16:20:01 2015 -0700
@@ -34,6 +34,7 @@
from pkg.actions import ActionError
from pkg.actions.file import FileAction
import re
+import six
import stat
import string
@@ -132,7 +133,7 @@
variants = action.get_variant_template()
variants.merge_unknown(pkg_vars)
# Action attributes must be lists or strings.
- for k, v in variants.iteritems():
+ for k, v in six.iteritems(variants):
if isinstance(v, set):
action.attrs[k] = list(v)
else:
@@ -1387,7 +1388,7 @@
if "fmri" not in action.attrs:
return
fmris = action.attrs["fmri"]
- if isinstance(fmris, basestring):
+ if isinstance(fmris, six.string_types):
fmris = [fmris]
for fmri in fmris:
--- a/src/modules/lint/pkglint_manifest.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/lint/pkglint_manifest.py Wed Jul 01 16:20:01 2015 -0700
@@ -26,11 +26,14 @@
# Some pkg(5) specific lint manifest checks
-from pkg.lint.engine import lint_fmri_successor
+import os.path
+import six
+from six.moves import configparser
+
import pkg.fmri as fmri
import pkg.lint.base as base
-import os.path
-import ConfigParser
+from pkg.lint.engine import lint_fmri_successor
+
class PkgManifestChecker(base.ManifestChecker):
"""A class to check manifests."""
@@ -74,7 +77,7 @@
continue
dep = action.attrs["fmri"]
try:
- if isinstance(dep, basestring):
+ if isinstance(dep, six.string_types):
f = fmri.PkgFmri(dep)
dic.setdefault(
f.get_name(), []
@@ -416,7 +419,7 @@
continue
deps = action.attrs["fmri"]
- if isinstance(deps, basestring):
+ if isinstance(deps, six.string_types):
deps = [deps]
for dep in deps:
@@ -559,7 +562,7 @@
"category").split(",")
if category not in ref_categories:
valid_value = False
- except ConfigParser.NoSectionError:
+ except configparser.NoSectionError:
sections = self.classification_data.sections()
engine.error(_("info.classification value {value} "
"does not contain one of the valid sections "
@@ -569,7 +572,7 @@
fmri=fmri),
msgid="{0}.4".format(msgid))
return
- except ConfigParser.NoOptionError:
+ except configparser.NoOptionError:
engine.error(_("Invalid info.classification value for "
"{fmri}: data file {file} does not have a "
"'category' key for section {section}.").format(
--- a/src/modules/manifest.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/manifest.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,9 +32,11 @@
import hashlib
import os
import re
+import six
import tempfile
-from itertools import groupby, chain, product, repeat, izip
+from itertools import groupby, chain, product, repeat
from operator import itemgetter
+from six.moves import zip
import pkg.actions as actions
import pkg.client.api_errors as apx
@@ -58,7 +60,7 @@
re.compile(fnmatch.translate(pat), re.IGNORECASE).match
for pat in pats
])
- for (key, pats) in fn_pats.iteritems()
+ for (key, pats) in six.iteritems(fn_pats)
)
@@ -71,7 +73,7 @@
if not attr_match:
return True
- for (attr, matches) in attr_match.iteritems():
+ for (attr, matches) in six.iteritems(attr_match):
if attr in action.attrs:
for match in matches:
for attrval in action.attrlist(attr):
@@ -242,8 +244,8 @@
sdict = dict(dictify(self, self_exclude))
odict = dict(dictify(origin, origin_exclude))
- sset = set(sdict.iterkeys())
- oset = set(odict.iterkeys())
+ sset = set(six.iterkeys(sdict))
+ oset = set(six.iterkeys(odict))
added = [(None, sdict[i]) for i in sset - oset]
removed = [(odict[i], None) for i in oset - sset]
@@ -397,7 +399,7 @@
a.attrs.get("mediator-implementation"))
mediators = self._actions_to_dict(gen_references)
- for mediation, mvariants in mediators.iteritems():
+ for mediation, mvariants in six.iteritems(mediators):
values = {
"mediator-priority": mediation[1],
"mediator-version": mediation[2],
@@ -408,12 +410,12 @@
"value={0} {1} {2}\n".format(mediation[0],
" ".join((
"=".join(t)
- for t in values.iteritems()
+ for t in six.iteritems(values)
if t[1]
)),
" ".join((
"=".join(t)
- for t in mvariant.iteritems()
+ for t in six.iteritems(mvariant)
))
)
yield a
@@ -458,7 +460,7 @@
afacets = []
avariants = []
- for attr, val in attrs.iteritems():
+ for attr, val in six.iteritems(attrs):
if attr[:8] == "variant.":
variants[attr].add(val)
avariants.append((attr, val))
@@ -564,7 +566,7 @@
# used by a single variant (think i386-only or
# sparc-only content) would be seen unvarianted
# (that's bad).
- vfacets = facets.values()
+ vfacets = list(facets.values())
vcfacets = vfacets[0].intersection(*vfacets[1:])
if vcfacets:
@@ -605,7 +607,7 @@
# Now emit a pkg.facet action for each variant
# combination containing the list of facets unique to
# that combination.
- for varkey, fnames in facets.iteritems():
+ for varkey, fnames in six.iteritems(facets):
# A unique key for each combination is needed,
# and using a hash obfuscates that interface
# while giving us a reliable way to generate
@@ -974,7 +976,7 @@
lineno = 0
errors = []
- if isinstance(content, basestring):
+ if isinstance(content, six.string_types):
# Get an iterable for the string.
content = content.splitlines()
@@ -1046,7 +1048,7 @@
content = mfile.read()
except EnvironmentError as e:
raise apx._convert_error(e)
- if isinstance(content, basestring):
+ if isinstance(content, six.string_types):
if signatures:
# Generate manifest signature based upon
# input content, but only if signatures
@@ -1191,7 +1193,7 @@
log = lambda x: None
try:
- file_handle = file(file_path, "rb")
+ file_handle = open(file_path, "rb")
except EnvironmentError as e:
if e.errno != errno.ENOENT:
raise
@@ -1283,7 +1285,7 @@
# This must be an SHA-1 hash in order to interoperate with
# older clients.
sha_1 = hashlib.sha1()
- if isinstance(mfstcontent, unicode):
+ if isinstance(mfstcontent, six.text_type):
# Byte stream expected, so pass encoded.
sha_1.update(mfstcontent.encode("utf-8"))
else:
@@ -1449,7 +1451,7 @@
continue
try:
- for v, d in izip(v_list, repeat(variants)):
+ for v, d in zip(v_list, repeat(variants)):
d[v].add(attrs[v])
if not excludes or action.include_this(
@@ -1460,7 +1462,7 @@
# from the current action should only be
# included if the action is not
# excluded.
- for v, d in izip(f_list, repeat(facets)):
+ for v, d in zip(f_list, repeat(facets)):
d[v].add(attrs[v])
except TypeError:
# Lists can't be set elements.
@@ -1615,7 +1617,7 @@
# so that empty cache files are created if no action of that
# type exists for the package (avoids full manifest loads
# later).
- for n, acts in self.actions_bytype.iteritems():
+ for n, acts in six.iteritems(self.actions_bytype):
t_prefix = "manifest.{0}.".format(n)
try:
--- a/src/modules/mediator.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/mediator.py Wed Jul 01 16:20:01 2015 -0700
@@ -23,6 +23,7 @@
# Copyright (c) 2011, 2015, Oracle and/or its affiliates. All rights reserved.
import re
+import six
import pkg.version as version
@@ -31,7 +32,7 @@
string is a valid name for a link mediation. 'valid' is a boolean
and 'error' is None or a string containing the error."""
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
if re.match("^[a-zA-Z0-9\-]+$", value):
return True, None
return False, _("'{0}' is not a valid mediator; only alphanumeric "
@@ -43,7 +44,7 @@
a boolean and 'error' is None or a string containing the error."""
error = ""
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
try:
version.Version(value)
return True, None
@@ -62,7 +63,7 @@
object representing the version. If the implementation is not valid
a tuple of (None, None) will be returned."""
- if not isinstance(value, basestring):
+ if not isinstance(value, six.string_types):
return None, None
if "@" in value:
@@ -93,7 +94,7 @@
error = ""
iname = iver = None
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
if "@" in value:
iname, iver = value.rsplit("@", 1)
else:
--- a/src/modules/misc.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/misc.py Wed Jul 01 16:20:01 2015 -0700
@@ -37,7 +37,6 @@
import errno
import fnmatch
import getopt
-import itertools
import locale
import os
import platform
@@ -46,6 +45,7 @@
import shutil
import signal
import simplejson as json
+import six
import socket
import struct
import sys
@@ -53,11 +53,20 @@
import time
import traceback
import urllib
-import urlparse
import zlib
from collections import defaultdict
from operator import itemgetter
+# Pylint seems to panic about six even when it is installed. Instead of using
+# 'disable' here, a better way would be to use ignore-modules in pylintrc, but
+# that had an issue that was only fixed recently. See pylint/issues/#223.
+# import-error; pylint: disable=F0401
+# no-name-in-module; pylint: disable=E0611
+# Redefining built-in 'range'; pylint: disable=W0622
+# Module 'urllib' has no 'parse' member; pylint: disable=E1101
+from six.moves import range, zip_longest
+from six.moves.urllib.parse import urlsplit, urlparse, urlunparse
+from six.moves.urllib.request import pathname2url, url2pathname
from stat import S_IFMT, S_IMODE, S_IRGRP, S_IROTH, S_IRUSR, S_IRWXU, \
S_ISBLK, S_ISCHR, S_ISDIR, S_ISFIFO, S_ISLNK, S_ISREG, S_ISSOCK, \
@@ -201,7 +210,7 @@
os.chown(dst, src_stat.st_uid, src_stat.st_gid)
os.utime(dst, (src_stat.st_atime, src_stat.st_mtime))
if problem:
- raise problem[0], problem[1], problem[2]
+ six.reraise(problem[0], problem[1], problem[2])
def move(src, dst):
"""Rewrite of shutil.move() that uses our copy of copytree()."""
@@ -303,21 +312,24 @@
return False
# First split the URL and check if the scheme is one we support
- o = urlparse.urlsplit(url)
+ o = urlsplit(url)
if not o[0] in _valid_proto:
return False
if o[0] == "file":
- path = urlparse.urlparse(url, "file", allow_fragments=0)[2]
- path = urllib.url2pathname(path)
+ path = urlparse(url, "file", allow_fragments=0)[2]
+ path = url2pathname(path)
if not os.path.abspath(path):
return False
# No further validation to be done.
return True
# Next verify that the network location is valid
- host = urllib.splitport(o[1])[0]
+ if six.PY3:
+ host = urllib.parse.splitport(o[1])[0]
+ else:
+ host = urllib.splitport(o[1])[0]
if proxy:
# We may have authentication details in the proxy URI, which
@@ -581,8 +593,8 @@
bufsz = 128 * 1024
closefobj = False
- if isinstance(data, basestring):
- f = file(data, "rb", bufsz)
+ if isinstance(data, six.string_types):
+ f = open(data, "rb", bufsz)
closefobj = True
else:
f = data
@@ -831,7 +843,7 @@
}
# fill in <format string> and <namedtuple> in _struct_descriptions
- for struct_name, v in _struct_descriptions.iteritems():
+ for struct_name, v in six.iteritems(_struct_descriptions):
desc = v[0]
# update _struct_descriptions with a format string
@@ -882,7 +894,7 @@
psinfo_size = 288
try:
- psinfo_data = file("/proc/self/psinfo").read(
+ psinfo_data = open("/proc/self/psinfo").read(
psinfo_size)
# Catch "Exception"; pylint: disable=W0703
except Exception:
@@ -1098,7 +1110,7 @@
related publisher."""
try:
- cf = file(path, "rb")
+ cf = open(path, "rb")
certdata = cf.read()
cf.close()
except EnvironmentError as e:
@@ -1226,11 +1238,11 @@
elif not os.path.isabs(uri):
uri = os.path.normpath(os.path.join(cwd, uri))
- uri = urlparse.urlunparse(("file", "",
- urllib.pathname2url(uri), "", "", ""))
+ uri = urlunparse(("file", "",
+ pathname2url(uri), "", "", ""))
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(uri, "file", allow_fragments=0)
+ urlparse(uri, "file", allow_fragments=0)
scheme = scheme.lower()
if scheme == "file":
@@ -1240,7 +1252,7 @@
path = "/" + path.lstrip("/")
# Rebuild the URI with the sanitized components.
- return urlparse.urlunparse((scheme, netloc, path, params,
+ return urlunparse((scheme, netloc, path, params,
query, fragment))
@@ -1558,7 +1570,7 @@
pkg_cmd = [pkg_bin]
# propagate debug options
- for k, v in DebugValues.iteritems():
+ for k, v in six.iteritems(DebugValues):
pkg_cmd.append("-D")
pkg_cmd.append("{0}={1}".format(k, v))
@@ -1716,7 +1728,7 @@
1 for k in field_data
if filter_tsv(field_data[k])
)
- fmt = "\t".join('{{{0}}}'.format(x) for x in xrange(num_fields))
+ fmt = "\t".join('{{{0}}}'.format(x) for x in range(num_fields))
filter_func = filter_tsv
elif out_format == "json" or out_format == "json-formatted":
args = { "sort_keys": True }
@@ -1727,12 +1739,12 @@
# any explicitly named fields are only included if 'json'
# is explicitly listed.
def fmt_val(v):
- if isinstance(v, basestring):
+ if isinstance(v, six.string_types):
return v
if isinstance(v, (list, tuple, set, frozenset)):
return [fmt_val(e) for e in v]
if isinstance(v, dict):
- for k, e in v.iteritems():
+ for k, e in six.iteritems(v):
v[k] = fmt_val(e)
return v
return str(v)
@@ -1754,8 +1766,8 @@
# Extract the list of headers from the field_data dictionary. Ensure
# they are extracted in the desired order by using the custom sort
# function.
- hdrs = map(get_header, sorted(filter(filter_func, field_data.values()),
- sort_fields))
+ hdrs = map(get_header, sorted(filter(filter_func,
+ field_data.values()), sort_fields))
# Output a header if desired.
output = ""
@@ -1766,7 +1778,7 @@
for entry in field_values:
map(set_value, (
(field_data[f], v)
- for f, v in entry.iteritems()
+ for f, v in six.iteritems(entry)
if f in field_data
))
values = map(get_value, sorted(filter(filter_func,
@@ -1806,10 +1818,8 @@
raise api_errors._convert_error(e)
# valid json types
-if sys.version > '3':
- json_types_immediates = (bool, float, int, str, type(None))
-else:
- json_types_immediates = (bool, float, int, long, basestring, type(None))
+json_types_immediates = (bool, float, six.integer_types, six.string_types,
+ type(None))
json_types_collections = (dict, list)
json_types = tuple(json_types_immediates + json_types_collections)
json_debug = False
@@ -1913,7 +1923,7 @@
# strings (that way we're encoder/decoder independent).
obj_cache = je_state[1]
obj_cache2 = {}
- for obj_id, obj_state in obj_cache.itervalues():
+ for obj_id, obj_state in six.itervalues(obj_cache):
obj_cache2[str(obj_id)] = obj_state
data = { "json_state": data, "json_objects": obj_cache2 }
@@ -1949,7 +1959,7 @@
# necessary since we use the PkgDecoder hook function during json_decode
# to convert unicode objects back into escaped str objects, which would
# otherwise do that conversion unintentionally.
- assert not isinstance(data_type, unicode), \
+ assert not isinstance(data_type, six.text_type), \
"unexpected unicode string: {0}".format(data)
# we don't need to do anything for basic types
@@ -1970,7 +1980,7 @@
# lookup the first descriptor to see if we have
# generic type description.
- desc_k, desc_v = desc.items()[0]
+ desc_k, desc_v = list(desc.items())[0]
# if the key in the first type pair is a type then we
# have a generic type description that applies to all
@@ -1981,7 +1991,7 @@
assert len(desc) == 1
# encode all key / value pairs
- for k, v in data.iteritems():
+ for k, v in six.iteritems(data):
# encode the key
name2 = "{0}[{1}].key()".format(name, desc_k)
k2 = json_encode(name2, k, desc_k,
@@ -1999,7 +2009,7 @@
# we have element specific value type descriptions.
# encode the specific values.
rv.update(data)
- for desc_k, desc_v in desc.iteritems():
+ for desc_k, desc_v in six.iteritems(desc):
# check for the specific key
if desc_k not in rv:
continue
@@ -2017,7 +2027,8 @@
# we always return a new list
rv = []
- # check for an empty list since we use izip_longest
+ # check for an empty list since we use izip_longest (zip_longest
+ # in Python 3)
if len(data) == 0:
return je_return(name, rv, finish, je_state)
@@ -2026,12 +2037,13 @@
rv.extend(data)
return je_return(name, rv, finish, je_state)
- # don't accidentally generate data via izip_longest
+ # don't accidentally generate data via izip_longest (zip_longest
+ # in Python 3)
assert len(data) >= len(desc), \
"{0:d} >= {1:d}".format(len(data), len(desc))
i = 0
- for data2, desc2 in itertools.izip_longest(data, desc,
+ for data2, desc2 in zip_longest(data, desc,
fillvalue=list(desc)[0]):
name2 = "{0}[{1:d}]".format(name, i)
i += 1
@@ -2194,7 +2206,7 @@
# lookup the first descriptor to see if we have
# generic type description.
- desc_k, desc_v = desc.items()[0]
+ desc_k, desc_v = list(desc.items())[0]
# if the key in the descriptor is a type then we have
# a generic type description that applies to all keys
@@ -2205,7 +2217,7 @@
assert len(desc) == 1
# decode all key / value pairs
- for k, v in data.iteritems():
+ for k, v in six.iteritems(data):
# decode the key
name2 = "{0}[{1}].key()".format(name, desc_k)
k2 = json_decode(name2, k, desc_k,
@@ -2223,7 +2235,7 @@
# we have element specific value type descriptions.
# copy all data and then decode the specific values
rv.update(data)
- for desc_k, desc_v in desc.iteritems():
+ for desc_k, desc_v in six.iteritems(desc):
# check for the specific key
if desc_k not in rv:
continue
@@ -2240,7 +2252,8 @@
# get the return type
rvtype = type(desc)
- # check for an empty list since we use izip_longest
+ # check for an empty list since we use izip_longest (zip_longest
+ # in Python 3)
if len(data) == 0:
rv = rvtype([])
return jd_return(name, rv, desc, finish, jd_state)
@@ -2250,13 +2263,14 @@
rv = rvtype(data)
return jd_return(name, rv, desc, finish, jd_state)
- # don't accidentally generate data via izip_longest
+ # don't accidentally generate data via izip_longest (zip_longest
+ # in Python 3)
assert len(data) >= len(desc), \
"{0:d} >= {1:d}".format(len(data), len(desc))
rv = []
i = 0
- for data2, desc2 in itertools.izip_longest(data, desc,
+ for data2, desc2 in zip_longest(data, desc,
fillvalue=list(desc)[0]):
name2 = "{0}[{1:d}]".format(name, i)
i += 1
@@ -2374,10 +2388,10 @@
are converted to string objects."""
rvdct = {}
- for k, v in dct.iteritems():
- if type(k) == unicode:
+ for k, v in six.iteritems(dct):
+ if type(k) == six.text_type:
k = k.encode("utf-8")
- if type(v) == unicode:
+ if type(v) == six.text_type:
v = v.encode("utf-8")
rvdct[k] = v
@@ -2625,7 +2639,7 @@
if no_proxy or no_proxy_upper:
# SplitResult has a netloc member; pylint: disable=E1103
- netloc = urlparse.urlsplit(uri, allow_fragments=0).netloc
+ netloc = urlsplit(uri, allow_fragments=0).netloc
host = netloc.split(":")[0]
if host in no_proxy or no_proxy == ["*"]:
return "-"
--- a/src/modules/p5i.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/p5i.py Wed Jul 01 16:20:01 2015 -0700
@@ -25,13 +25,15 @@
#
import os
+import simplejson as json
+
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.parse import urlunparse
+from six.moves.urllib.request import urlopen, pathname2url
+
import pkg.client.api_errors as api_errors
import pkg.client.publisher as publisher
import pkg.fmri as fmri
-import simplejson as json
-import urllib
-import urllib2
-import urlparse
CURRENT_VERSION = 1
MIME_TYPE = "application/vnd.pkg5.info"
@@ -65,13 +67,13 @@
not location.startswith("file:/"):
# Convert the file path to a URI.
location = os.path.abspath(location)
- location = urlparse.urlunparse(("file", "",
- urllib.pathname2url(location), "", "", ""))
+ location = urlunparse(("file", "",
+ pathname2url(location), "", "", ""))
try:
- fileobj = urllib2.urlopen(location)
+ fileobj = urlopen(location)
except (EnvironmentError, ValueError,
- urllib2.HTTPError) as e:
+ HTTPError) as e:
raise api_errors.RetrievalError(e,
location=location)
@@ -80,7 +82,7 @@
dump_struct = json.loads(data)
else:
dump_struct = json.load(fileobj)
- except (EnvironmentError, urllib2.HTTPError) as e:
+ except (EnvironmentError, HTTPError) as e:
raise api_errors.RetrievalError(e)
except ValueError as e:
# Not a valid JSON file.
@@ -103,7 +105,7 @@
for p in plist:
alias = p.get("alias", None)
prefix = p.get("name", None)
-
+
if not prefix:
prefix = "Unknown"
--- a/src/modules/p5p.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/p5p.py Wed Jul 01 16:20:01 2015 -0700
@@ -28,9 +28,13 @@
import collections
import errno
import tarfile as tf
-import pkg.pkggzip
-import pkg.pkgtarfile as ptf
import os
+import shutil
+import six
+import sys
+import tempfile
+from six.moves.urllib.parse import unquote
+
import pkg
import pkg.client.api_errors as apx
import pkg.client.publisher
@@ -40,10 +44,8 @@
import pkg.misc
import pkg.portable
import pkg.p5i
-import shutil
-import sys
-import tempfile
-import urllib
+import pkg.pkggzip
+import pkg.pkgtarfile as ptf
if sys.version > '3':
long = int
@@ -723,7 +725,7 @@
"""
assert pfmri and mpath and fpath
- if isinstance(pfmri, basestring):
+ if isinstance(pfmri, six.string_types):
pfmri = pkg.fmri.PkgFmri(pfmri)
assert pfmri.publisher
self.__add_package(pfmri, mpath, fpath=fpath)
@@ -743,7 +745,7 @@
"""
assert pfmri and repo
- if isinstance(pfmri, basestring):
+ if isinstance(pfmri, six.string_types):
pfmri = pkg.fmri.PkgFmri(pfmri)
assert pfmri.publisher
self.__add_package(pfmri, repo.manifest(pfmri), repo=repo)
@@ -825,8 +827,8 @@
for name in self.__extract_offsets:
if name.startswith(manpath) and name.count("/") == 4:
ignored, stem, ver = name.rsplit("/", 2)
- stem = urllib.unquote(stem)
- ver = urllib.unquote(ver)
+ stem = unquote(stem)
+ ver = unquote(ver)
pfmri = pkg.fmri.PkgFmri(name=stem,
publisher=pub, version=ver)
@@ -930,7 +932,7 @@
assert not self.__closed and "r" in self.__mode
assert pfmri and path
- if isinstance(pfmri, basestring):
+ if isinstance(pfmri, six.string_types):
pfmri = pkg.fmri.PkgFmri(pfmri)
assert pfmri.publisher
@@ -1124,7 +1126,7 @@
assert not self.__closed and "r" in self.__mode
assert pfmri
- if isinstance(pfmri, basestring):
+ if isinstance(pfmri, six.string_types):
pfmri = pkg.fmri.PkgFmri(pfmri)
assert pfmri.publisher
@@ -1148,7 +1150,7 @@
in the archive."""
if self.__pubs:
- return self.__pubs.values()
+ return list(self.__pubs.values())
# If the extraction index doesn't exist, scan the complete
# archive and build one.
@@ -1180,7 +1182,7 @@
self.__pubs[pfx] = pub
- return self.__pubs.values()
+ return list(self.__pubs.values())
def __cleanup(self):
"""Private helper method to cleanup temporary files."""
--- a/src/modules/p5s.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/p5s.py Wed Jul 01 16:20:01 2015 -0700
@@ -26,15 +26,13 @@
import copy
import os
+import simplejson as json
+from six.moves.urllib.parse import urlparse, urlunparse
+
import pkg.client.api_errors as api_errors
import pkg.client.publisher as publisher
import pkg.digest as digest
import pkg.fmri as fmri
-import simplejson as json
-import urllib
-import urllib2
-import urlparse
-
from pkg.client.imageconfig import DEF_TOKEN
CURRENT_VERSION = 0
@@ -59,9 +57,9 @@
if val.startswith("http://{0}".format(
publisher.SYSREPO_PROXY)):
scheme, netloc, path, params, query, fragment =\
- urlparse.urlparse(val)
+ urlparse(val)
r = publisher.RepositoryURI(
- urlparse.urlunparse((scheme, proxy_host,
+ urlunparse((scheme, proxy_host,
path, params, query, fragment)))
else:
# This URI needs to be proxied through the
--- a/src/modules/pipeutils.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/pipeutils.py Wed Jul 01 16:20:01 2015 -0700
@@ -69,10 +69,8 @@
"""
from __future__ import print_function
-import SocketServer
import errno
import fcntl
-import httplib
import os
import socket
import stat
@@ -96,7 +94,11 @@
#
# Unused import; pylint: disable=W0611
from jsonrpclib import ProtocolError as ProtocolError1
-from xmlrpclib import ProtocolError as ProtocolError2
+
+# import-error; pylint: disable=F0401
+# no-name-in-module; pylint: disable=E0611
+from six.moves import socketserver, http_client
+from six.moves.xmlrpc_client import ProtocolError as ProtocolError2
# Unused import; pylint: enable=W0611
# debugging
@@ -332,8 +334,11 @@
"""set socket opt."""
pass
-
-class PipedHTTPResponse(httplib.HTTPResponse):
+# pylint seems to panic about these.
+# PipedHTTP: Class has no __init__ method; pylint: disable=W0232
+# PipedHTTPResponse.begin: Attribute 'will_close' defined outside __init__;
+# pylint: disable=W0201
+class PipedHTTPResponse(http_client.HTTPResponse):
"""Create a httplib.HTTPResponse like object that can be used with
a pipe as a transport. We override the minimum number of parent
routines necessary."""
@@ -342,12 +347,12 @@
"""Our connection will never be automatically closed, so set
will_close to False."""
- httplib.HTTPResponse.begin(self)
+ http_client.HTTPResponse.begin(self)
self.will_close = False
return
-class PipedHTTPConnection(httplib.HTTPConnection):
+class PipedHTTPConnection(http_client.HTTPConnection):
"""Create a httplib.HTTPConnection like object that can be used with
a pipe as a transport. We override the minimum number of parent
routines necessary."""
@@ -359,7 +364,7 @@
assert port is None
# invoke parent constructor
- httplib.HTTPConnection.__init__(self, "localhost",
+ http_client.HTTPConnection.__init__(self, "localhost",
strict=strict)
# self.sock was initialized by httplib.HTTPConnection
@@ -440,14 +445,14 @@
return self.parse_response(c.makefile())
-class _PipedServer(SocketServer.BaseServer):
+class _PipedServer(socketserver.BaseServer):
"""Modeled after SocketServer.TCPServer."""
def __init__(self, fd, RequestHandlerClass):
self.__pipe_file = PipeFile(fd, "server-transport")
self.__shutdown_initiated = False
- SocketServer.BaseServer.__init__(self,
+ socketserver.BaseServer.__init__(self,
server_address="localhost",
RequestHandlerClass=RequestHandlerClass)
--- a/src/modules/pkgsubprocess.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/pkgsubprocess.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,6 +27,7 @@
import os
import platform
import types
+import six
import subprocess
import pkg.portable
try:
@@ -156,7 +157,7 @@
# the explicit conversion to a list.
env = [
"{0}={1}".format(k, v)
- for k, v in env.iteritems()
+ for k, v in six.iteritems(env)
]
self.pid = posix_spawnp(executable, args, sfa, env)
--- a/src/modules/portable/os_unix.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/portable/os_unix.py Wed Jul 01 16:20:01 2015 -0700
@@ -175,7 +175,7 @@
return
users[dirpath] = user = {}
uids[dirpath] = uid = {}
- f = file(passwd_file)
+ f = open(passwd_file)
for line in f:
arr = line.rstrip().split(":")
if len(arr) != 7:
@@ -206,7 +206,7 @@
return
groups[dirpath] = group = {}
gids[dirpath] = gid = {}
- f = file(group_file)
+ f = open(group_file)
for line in f:
arr = line.rstrip().split(":")
if len(arr) != 4:
--- a/src/modules/publish/dependencies.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/publish/dependencies.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,9 +29,10 @@
import operator
import os
import re
-import urllib
+import six
from collections import namedtuple
+from six.moves.urllib.parse import unquote
import pkg.actions as actions
import pkg.client.api as api
@@ -153,7 +154,7 @@
def __str__(self):
s = ""
- for r, diff in sorted(self.rvs.iteritems()):
+ for r, diff in sorted(six.iteritems(self.rvs)):
for kind in diff.type_diffs:
s += _("\t{r:15} Variant '{kind}' is not "
"declared.\n").format(
@@ -196,7 +197,7 @@
which share a predicate. pkg(5) can not represent these dependencies. This
issue can be resolved by changing the packaging of the links which generated the
conditional dependencies so that they have different predicates or share the
-same FMRI. Each pair of problematic conditional dependencies follows:
+same FMRI. Each pair of problematic conditional dependencies follows:
""")
for i, (d1, d2, v) in enumerate(self.conditionals):
i += 1
@@ -650,7 +651,7 @@
'mfst' is the Manifest object."""
if mfst is None:
- return urllib.unquote(os.path.basename(fp)), None
+ return unquote(os.path.basename(fp)), None
name = mfst.get("pkg.fmri", mfst.get("fmri", None))
if name is not None:
try:
@@ -658,7 +659,7 @@
except fmri.IllegalFmri:
pfmri = None
return name, pfmri
- return urllib.unquote(os.path.basename(fp)), None
+ return unquote(os.path.basename(fp)), None
def make_paths(file_dep):
"""Find all the possible paths which could satisfy the dependency
@@ -669,9 +670,9 @@
rps = file_dep.attrs.get(paths_prefix, [""])
files = file_dep.attrs[files_prefix]
- if isinstance(files, basestring):
+ if isinstance(files, six.string_types):
files = [files]
- if isinstance(rps, basestring):
+ if isinstance(rps, six.string_types):
rps = [rps]
return [os.path.join(rp, f) for rp in rps for f in files]
@@ -1052,7 +1053,7 @@
elif v != dest.attrs[k]:
# For now, just merge the values. Duplicate values
# will be removed in a later step.
- if isinstance(v, basestring):
+ if isinstance(v, six.string_types):
v = [v]
if isinstance(dest.attrs[k], list):
dest.attrs[k].extend(v)
@@ -1515,7 +1516,7 @@
"""Given a dependency action with pkg.debug.depend attributes
return a matching action with those attributes removed"""
- attrs = dict((k, v) for k, v in action.attrs.iteritems()
+ attrs = dict((k, v) for k, v in six.iteritems(action.attrs)
if not k.startswith(base.Dependency.DEPEND_DEBUG_PREFIX))
return actions.depend.DependencyAction(**attrs)
@@ -1675,7 +1676,7 @@
for pkg_vct in package_vars.values():
pkg_vct.merge_unknown(distro_vars)
# Populate the installed files dictionary.
- for pth, l in tmp_files.iteritems():
+ for pth, l in six.iteritems(tmp_files):
new_val = [
(p, __merge_actvct_with_pkgvct(tmpl,
package_vars[p.pkg_name]))
@@ -1685,7 +1686,7 @@
del tmp_files
# Populate the link dictionary using the installed packages'
# information.
- for pth, l in tmp_links.iteritems():
+ for pth, l in six.iteritems(tmp_links):
new_val = [
(p, __merge_actvct_with_pkgvct(tmpl,
package_vars[p.pkg_name]), target)
--- a/src/modules/publish/transaction.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/publish/transaction.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,8 +29,8 @@
though the other classes can be referred to for documentation purposes."""
import os
-import urllib
-import urlparse
+import six
+from six.moves.urllib.parse import quote, unquote, urlparse, urlunparse
from pkg.misc import EmptyDict
import pkg.actions as actions
@@ -48,12 +48,6 @@
self.data = args[0]
self._args = kwargs
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
def __str__(self):
return str(self.data)
@@ -179,7 +173,7 @@
def open(self):
"""Starts an in-flight transaction. Returns a URL-encoded
transaction ID on success."""
- return urllib.quote(self.pkg_name, "")
+ return quote(self.pkg_name, "")
def append(self):
"""Starts an in-flight transaction to append to an existing
@@ -201,13 +195,13 @@
progtrack=None):
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(origin_url, "http", allow_fragments=0)
+ urlparse(origin_url, "http", allow_fragments=0)
self.pkg_name = pkg_name
self.trans_id = trans_id
self.scheme = scheme
if scheme == "file":
- path = urllib.unquote(path)
+ path = unquote(path)
self.path = path
self.progtrack = progtrack
self.transport = xport
@@ -377,7 +371,7 @@
msg=_("Unknown failure; no transaction ID provided"
" in response."))
- return self.trans_id
+ return self.trans_id
def refresh_index(self):
"""Instructs the repository to refresh its search indices.
@@ -430,7 +424,7 @@
pub=None, progtrack=None):
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(origin_url, "http", allow_fragments=0)
+ urlparse(origin_url, "http", allow_fragments=0)
scheme = scheme.lower()
if noexecute:
@@ -459,7 +453,7 @@
raise TransactionRepositoryURLError(origin_url)
# Rebuild the url with the sanitized components.
- origin_url = urlparse.urlunparse((scheme, netloc, path, params,
+ origin_url = urlunparse((scheme, netloc, path, params,
query, fragment))
return cls.__schemes[scheme](origin_url,
--- a/src/modules/query_parser.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/query_parser.py Wed Jul 01 16:20:01 2015 -0700
@@ -28,12 +28,12 @@
import os
import fnmatch
import re
+import six
import sys
import threading
import copy
import itertools
import errno
-import cgi
import ply.lex as lex
import ply.yacc as yacc
@@ -146,7 +146,7 @@
t.type = "FTERM"
t.value = (pkg_name, action_type, key, token)
return t
-
+
def t_TERM(self, t):
# This rule handles the general search terms as well as
# checking for any reserved words such as AND or OR.
@@ -166,7 +166,7 @@
def set_input(self, input):
self.lexer.input(input)
-
+
def token(self):
return self.lexer.token()
@@ -175,7 +175,7 @@
def get_string(self):
return self.lexer.lexdata
-
+
def test(self, data):
"""This is a function useful for testing and debugging as it
shows the user exactly which tokens are produced from the input
@@ -249,7 +249,7 @@
# performing the search.
'term : TERM'
p[0] = self.query_objs["TermQuery"](p[1])
-
+
def p_fterm(self, p):
# fterms are the parser's representation of the lexer's FTERMS
# (which are field/structured query terms). In the query
@@ -366,14 +366,8 @@
self.lexer.set_input(input)
return self.parser.parse(lexer=self.lexer)
-
class QueryException(Exception):
-
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
+ pass
class QueryLengthExceeded(QueryException):
@@ -419,7 +413,7 @@
"and the text of the query. The query provided lacked at "
"least one of those fields:\n{0}").format(self.query)
-
+
class ParseError(QueryException):
def __init__(self, parse_object, string_position, input_string):
QueryException.__init__(self)
@@ -589,7 +583,7 @@
self.lc.set_info(**kwargs)
self.rc.set_info(**kwargs)
-
+
def search(self, *args):
"""Distributes the search to the two children and returns a
tuple of the results."""
@@ -657,14 +651,14 @@
# also removed from the results.
lc_set, rc_set = BooleanQuery.search(self, *args)
return self.sorted(lc_set & rc_set)
-
+
def __str__(self):
return "({0!s} AND {1!s})".format(self.lc, self.rc)
def __repr__(self):
return "({0!r} AND {1!r})".format(self.lc, self.rc)
-
+
class OrQuery(BooleanQuery):
"""Class representing OR queries in the AST."""
@@ -719,7 +713,7 @@
class PkgConversion(object):
"""Class representing a change from returning actions to returning
packages in the AST."""
-
+
def __init__(self, query):
self.query = query
self.return_type = Query.RETURN_PACKAGES
@@ -753,7 +747,7 @@
(pfmri for at, st, pfmri, fv, l in it)))
else:
assert 0
-
+
def search(self, restriction, *args):
"""Takes the results of its child's search and converts the
results to be a sorted list of packages.
@@ -846,7 +840,7 @@
return fv
else:
return fs
-
+
def search(self, restriction, *args):
"""Perform a search for the given phrase. The child is used to
find instances of the first word of the phrase. Those results
@@ -875,7 +869,7 @@
itself. It then returns the new node to its parent for
insertion into the tree."""
return PkgConversion(self)
-
+
class FieldQuery(object):
"""Class representing a structured query in the AST."""
@@ -955,7 +949,7 @@
return x >= self.start_point and \
(self.num_to_return is None or
x < self.num_to_return + self.start_point)
-
+
def finalize_results(self, it):
"""Converts the internal result representation to the format
which is expected by the callers of search. It also handles
@@ -991,8 +985,8 @@
self.num_to_return = num_to_return
self.query.set_info(start_point=start_point,
num_to_return=num_to_return, **kwargs)
-
-
+
+
def search(self, *args):
"""Perform search by taking the result of the child's search
and transforming and subselecting the results. None is passed
@@ -1005,7 +999,7 @@
"""Returns whether the query supports a query of version v."""
return self.query.allow_version(v)
-
+
def propagate_pkg_return(self):
"""Makes the child return packages instead of actions.
@@ -1033,7 +1027,7 @@
has_non_wildcard_character = re.compile('.*[^\*\?].*')
fmris = None
-
+
def __init__(self, term):
"""term is a the string for the token to be searched for."""
@@ -1164,21 +1158,21 @@
@staticmethod
def __is_wildcard(s):
return s == '*' or s == ''
-
+
def add_trailing_wildcard(self):
"""Ensures that the search is a prefix match. Primarily used
by the PhraseQuery class."""
if not self._term.endswith('*'):
self._term += "*"
-
+
def set_info(self, index_dir, get_manifest_path,
case_sensitive, **kwargs):
"""Sets the information needed to search which is specific to
the particular index used to back the search.
'index_dir' is a path to the base directory of the index.
-
+
'get_manifest_path' is a function which when given a
fully specified fmri returns the path to the manifest file
for that fmri.
@@ -1206,7 +1200,7 @@
ss.FMRI_OFFSETS_FILE, None)
# Create a temporary list of dictionaries we need to
# open consistently.
- tmp = tq_gdd.values()
+ tmp = list(tq_gdd.values())
tmp.append(self._data_main_dict)
try:
# Try to open the index files assuming they
@@ -1219,7 +1213,7 @@
# back to the index prior to the conversion
# to using the fmri_offsets.v1 file.
del tq_gdd["fmri_offsets"]
- tmp = tq_gdd.values()
+ tmp = list(tq_gdd.values())
tmp.append(self._data_main_dict)
ret = ss.consistent_open(tmp, self._dir_path,
self._file_timeout_secs)
@@ -1251,7 +1245,7 @@
for k, d
in tq_gdd.items()
])
- tmp = tq_gdd.values()
+ tmp = list(tq_gdd.values())
tmp.append(self._data_main_dict)
ret = ss.consistent_open(tmp,
self._dir_path,
@@ -1285,7 +1279,7 @@
def allow_version(self, v):
"""Returns whether the query supports a query of version v."""
return True
-
+
def _close_dicts(self):
"""Closes the main dictionary file handle, which is handled
separately from the other dictionaries since it's not read
@@ -1306,7 +1300,7 @@
else:
res.append(l)
return res
-
+
def _restricted_search_internal(self, restriction):
"""Searches for the given term within a restricted domain of
search results. restriction is a generator function that
@@ -1480,13 +1474,13 @@
elif glob and \
not TermQuery.has_non_wildcard_character.match(term):
line_iter = self._data_main_dict.get_file_handle()
-
+
for line in line_iter:
assert not line == '\n'
tok, at_lst = \
self._data_main_dict.parse_main_dict_line(line)
# Check that the token was what was expected.
- assert ((term == tok) or
+ assert ((term == tok) or
(not case_sensitive and
term.lower() == tok.lower()) or
(glob and fnmatch.fnmatch(tok, term)) or
--- a/src/modules/search_errors.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/search_errors.py Wed Jul 01 16:20:01 2015 -0700
@@ -35,12 +35,6 @@
def __init__(self, cause):
self.cause = cause
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
class InconsistentIndexException(IndexingException):
"""This is used when the existing index is found to have inconsistent
@@ -88,12 +82,6 @@
def __init__(self, index_dir):
self.index_dir = index_dir
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
def __str__(self):
return "Could not find index to search, looked in: " \
"{0}".format(self.index_dir)
@@ -107,12 +95,6 @@
self.ev = existing_val
self.iv = incoming_val
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
def __str__(self):
return "existing_val was:{0}\nincoming_val was:{1}".format(
self.ev, self.iv)
--- a/src/modules/search_storage.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/search_storage.py Wed Jul 01 16:20:01 2015 -0700
@@ -28,7 +28,7 @@
import errno
import time
import hashlib
-import urllib
+from six.moves.urllib.parse import quote, unquote
import pkg.fmri as fmri
import pkg.search_errors as search_errors
@@ -154,7 +154,7 @@
else:
self._file_handle = f_handle
self._file_path = f_path
- if self._mtime is None:
+ if self._mtime is None:
stat_info = os.stat(self._file_path)
self._mtime = stat_info.st_mtime
self._size = stat_info.st_size
@@ -263,7 +263,7 @@
split_chars = IndexStoreMainDict.sep_chars
line = line.rstrip('\n')
tmp = line.split(split_chars[0])
- tok = urllib.unquote(tmp[0])
+ tok = unquote(tmp[0])
atl = tmp[1:]
res = []
for ati in atl:
@@ -278,7 +278,7 @@
st_res = []
for fvi in fvl:
tmp = fvi.split(split_chars[3])
- full_value = urllib.unquote(tmp[0])
+ full_value = unquote(tmp[0])
pfl = tmp[1:]
fv_res = []
for pfi in pfl:
@@ -303,7 +303,7 @@
line = line.rstrip("\n")
lst = line.split(" ", 1)
- return urllib.unquote(lst[0])
+ return unquote(lst[0])
@staticmethod
def transform_main_dict_line(token, entries):
@@ -322,7 +322,7 @@
in _write_main_dict_line in indexer.py.
"""
sep_chars = IndexStoreMainDict.sep_chars
- res = "{0}".format(urllib.quote(str(token)))
+ res = "{0}".format(quote(str(token)))
for ati, atl in enumerate(entries):
action_type, atl = atl
res += "{0}{1}".format(sep_chars[0], action_type)
@@ -332,7 +332,7 @@
for fvi, fvl in enumerate(stl):
full_value, fvl = fvl
res += "{0}{1}".format(sep_chars[2],
- urllib.quote(str(full_value)))
+ quote(str(full_value)))
for pfi, pfl in enumerate(fvl):
pfmri_index, pfl = pfl
res += "{0}{1}".format(sep_chars[3],
@@ -431,7 +431,7 @@
def get_id_and_add(self, entity):
"""Adds entity if it's not previously stored and returns the
- id for entity.
+ id for entity.
"""
# This code purposefully reimplements add_entity
# code. Replacing the function calls to has_entity, add_entity,
@@ -558,12 +558,12 @@
return self._dict[entity]
def get_keys(self):
- return self._dict.keys()
+ return list(self._dict.keys())
@staticmethod
def __quote(str):
if " " in str:
- return "1" + urllib.quote(str)
+ return "1" + quote(str)
else:
return "0" + str
@@ -575,7 +575,7 @@
for line in self._file_handle:
token, offset = line.split(" ")
if token[0] == "1":
- token = urllib.unquote(token[1:])
+ token = unquote(token[1:])
else:
token = token[1:]
offset = int(offset)
@@ -663,7 +663,7 @@
"""Returns the number of entries removed during a second phase
of indexing."""
return 0
-
+
class IndexStoreSet(IndexStoreBase):
"""Used when only set membership is desired.
This is currently designed for exclusive use
@@ -738,7 +738,7 @@
p_id_trans is an object which has a get entity method which,
when given a package id number returns the PkgFmri object
for that id number."""
-
+
IndexStoreBase.__init__(self, file_name)
self._p_id_trans = p_id_trans
self._dict = {}
@@ -755,7 +755,7 @@
self._fmri_offsets[p_id].append(offset)
except KeyError:
self._fmri_offsets[p_id] = [offset]
-
+
def invert_id_to_offsets_dict(self):
"""Does delta encoding of offsets to reduce space by only
storing the difference between the current offset and the
@@ -790,7 +790,7 @@
include_scheme=False)
for p_id in p_ids
]) + "!" + offset_str
-
+
def write_dict_file(self, path, version_num):
"""Write the mapping of package fmris to offset sets out
to the file."""
@@ -824,7 +824,7 @@
ret.append(o)
old_o = o
return ret
-
+
def get_offsets(self, match_func):
"""For a given function which returns true if it matches the
desired fmri, return the offsets which are associated with the
--- a/src/modules/server/api.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/server/api.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,6 +27,9 @@
import itertools
import os
import StringIO
+import six
+
+from operator import itemgetter
import pkg.catalog
import pkg.client.pkgdefs as pkgdefs
@@ -40,7 +43,6 @@
from pkg.api_common import (PackageInfo, LicenseInfo, PackageCategory,
_get_pkg_cat_data)
-from operator import itemgetter
CURRENT_API_VERSION = 12
@@ -139,7 +141,7 @@
# incorporations above.
cat_info = frozenset([cat.DEPENDENCY])
remaining = set(cat.names(pubs=pubs)) - \
- set(allowed.iterkeys())
+ set(six.iterkeys(allowed))
for pkg_name in remaining:
for ver, flist in cat.fmris_by_version(pkg_name,
pubs=pubs):
@@ -506,7 +508,7 @@
def filtered_search(results, mver):
try:
- result = results.next()
+ result = next(results)
except StopIteration:
return
@@ -574,7 +576,7 @@
for lic in mfst.gen_actions_by_type("license"):
s = StringIO.StringIO()
lpath = self._depot.repo.file(lic.hash, pub=self._pub)
- lfile = file(lpath, "rb")
+ lfile = open(lpath, "rb")
misc.gunzip_from_stream(lfile, s, ignore_hash=True)
text = s.getvalue()
s.close()
@@ -679,7 +681,7 @@
See pkg.depotd(1M) for the list of properties.
"""
rval = {}
- for sname, props in self._depot.cfg.get_index().iteritems():
+ for sname, props in six.iteritems(self._depot.cfg.get_index()):
rval[sname] = [p for p in props]
return rval
@@ -708,7 +710,7 @@
format.
"""
rval = {}
- for sname, props in self._depot.repo.cfg.get_index().iteritems():
+ for sname, props in six.iteritems(self._depot.repo.cfg.get_index()):
rval[sname] = [p for p in props]
return rval
--- a/src/modules/server/api_errors.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/server/api_errors.py Wed Jul 01 16:20:01 2015 -0700
@@ -30,12 +30,6 @@
if args:
self.data = args[0]
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
def __str__(self):
return str(self.data)
--- a/src/modules/server/catalog.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/server/catalog.py Wed Jul 01 16:20:01 2015 -0700
@@ -38,12 +38,6 @@
def __init__(self, args=None):
self._args = args
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
class CatalogPermissionsException(CatalogException):
"""Used to indicate the server catalog files do not have the expected
@@ -236,12 +230,12 @@
# Try to open catalog file. If it doesn't exist,
# create an empty catalog file, and then open it read only.
try:
- pfile = file(self.catalog_file, "rb")
+ pfile = open(self.catalog_file, "rb")
except IOError as e:
if e.errno == errno.ENOENT:
# Creating an empty file
- file(self.catalog_file, "wb").close()
- pfile = file(self.catalog_file, "rb")
+ open(self.catalog_file, "wb").close()
+ pfile = open(self.catalog_file, "rb")
else:
portable.remove(tmpfile)
raise
@@ -357,7 +351,7 @@
the catalog as a list of strings."""
try:
- cfile = file(self.catalog_file, "r")
+ cfile = open(self.catalog_file, "r")
except EnvironmentError as e:
# Missing catalog is fine; other errors need to
# be reported.
@@ -423,7 +417,7 @@
iterates over the contents of the catalog."""
try:
- pfile = file(os.path.normpath(
+ pfile = open(os.path.normpath(
os.path.join(self.catalog_root, "catalog")), "r")
except IOError as e:
if e.errno == errno.ENOENT:
@@ -461,7 +455,7 @@
if not os.path.exists(apath):
return
- afile = file(apath, "r")
+ afile = open(apath, "r")
attrre = re.compile('^S ([^:]*): (.*)')
for entry in afile:
@@ -626,7 +620,7 @@
yield line
try:
- cfile = file(os.path.normpath(
+ cfile = open(os.path.normpath(
os.path.join(self.catalog_root, "catalog")),
"r")
except IOError as e:
@@ -722,7 +716,7 @@
"""Read the catalog file in "path" and combine it with the
existing data in "catalog"."""
- catf = file(os.path.join(path, "catalog"))
+ catf = open(os.path.join(path, "catalog"))
for line in catf:
if not line.startswith("V pkg") and \
not line.startswith("C pkg"):
--- a/src/modules/server/depot.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/server/depot.py Wed Jul 01 16:20:01 2015 -0700
@@ -44,7 +44,6 @@
import ast
import cStringIO
import errno
-import httplib
import inspect
import itertools
import math
@@ -52,13 +51,17 @@
import random
import re
import shutil
+import six
import simplejson as json
import socket
import tarfile
import tempfile
import threading
import time
-import urlparse
+
+from six.moves import http_client, queue
+from six.moves.urllib.parse import quote, urlunsplit
+
# Without the below statements, tarfile will trigger calls to getpwuid and
# getgrgid for every file downloaded. This in turn leads to nscd usage which
# limits the throughput of the depot process. Setting these attributes to
@@ -67,9 +70,6 @@
tarfile.pwd = None
tarfile.grp = None
-import urllib
-import Queue
-
import pkg
import pkg.actions as actions
import pkg.config as cfg
@@ -86,7 +86,6 @@
from pkg.server.query_parser import Query, ParseError, BooleanQueryException
-
class Dummy(object):
"""Dummy object used for dispatch method mapping."""
pass
@@ -281,7 +280,7 @@
try:
self.__bgtask.put(self.repo.refresh_index)
- except Queue.Full:
+ except queue.Full:
# If another operation is already in progress, just
# log a warning and drive on.
cherrypy.log("Skipping indexing; another operation is "
@@ -428,7 +427,7 @@
op = tokens[1]
if op in self.REPO_OPS_DEFAULT and op not in self.vops:
- raise cherrypy.HTTPError(httplib.NOT_FOUND,
+ raise cherrypy.HTTPError(http_client.NOT_FOUND,
"Operation not supported in current server mode.")
elif op not in self.vops:
request = cherrypy.request
@@ -445,20 +444,20 @@
else:
ver = int(tokens[2])
except IndexError:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
"Missing version\n")
except ValueError:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
"Non-integer version\n")
if ver not in self.vops[op]:
# 'version' is not supported for the operation.
- raise cherrypy.HTTPError(httplib.NOT_FOUND,
+ raise cherrypy.HTTPError(http_client.NOT_FOUND,
"Version '{0}' not supported for operation '{1}'\n".format(
ver, op))
elif op == "open" and pub not in self.repo.publishers:
if not misc.valid_pub_prefix(pub):
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
"Invalid publisher prefix: {0}\n".format(pub))
# Map operations for new publisher.
@@ -473,12 +472,12 @@
cherrypy.request.query_string)
raise cherrypy.InternalRedirect(rel_uri)
elif pub:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
"Unknown publisher: {0}\n".format(pub))
# Assume 'version' is not supported for the operation for some
# other reason.
- raise cherrypy.HTTPError(httplib.NOT_FOUND, "Version '{0}' not "
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, "Version '{0}' not "
"supported for operation '{1}'\n".format(ver, op))
@cherrypy.tools.response_headers(headers=\
@@ -491,7 +490,7 @@
versions = "pkg-server {0}\n".format(pkg.VERSION)
versions += "\n".join(
"{0} {1}".format(op, " ".join(str(v) for v in vers))
- for op, vers in self.vops.iteritems()
+ for op, vers in six.iteritems(self.vops)
) + "\n"
return versions
@@ -516,14 +515,14 @@
res_list = self.repo.search(query_args_lst,
pub=self._get_req_pub())
except srepo.RepositorySearchUnavailableError as e:
- raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
+ raise cherrypy.HTTPError(http_client.SERVICE_UNAVAILABLE,
str(e))
except srepo.RepositoryError as e:
# Treat any remaining repository error as a 404, but
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
# Translate the results from v1 format into what a v0
# searcher expects as results.
@@ -559,28 +558,28 @@
# Check for the POST method of doing a search request.
if not query_str_lst:
- query_str_lst = params.values()
- elif params.values():
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ query_str_lst = list(params.values())
+ elif list(params.values()):
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
"args:{0}, params:{1}".format(args, params))
if not query_str_lst:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST)
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST)
try:
res_list = self.repo.search(query_str_lst,
pub=self._get_req_pub())
except (ParseError, BooleanQueryException) as e:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
except srepo.RepositorySearchUnavailableError as e:
- raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
+ raise cherrypy.HTTPError(http_client.SERVICE_UNAVAILABLE,
str(e))
except srepo.RepositoryError as e:
# Treat any remaining repository error as a 404, but
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
# In order to be able to have a return code distinguish between
# no results and search unavailable, we need to use a different
@@ -591,10 +590,10 @@
# to the user.
if len(res_list) == 1:
try:
- tmp = res_list[0].next()
+ tmp = next(res_list[0])
res_list = [itertools.chain([tmp], res_list[0])]
except StopIteration:
- cherrypy.response.status = httplib.NO_CONTENT
+ cherrypy.response.status = http_client.NO_CONTENT
return
response = cherrypy.response
@@ -611,7 +610,7 @@
fmri_str, fv, line = vals
yield "{0} {1} {2} {3} {4}\n".format(
i, return_type, fmri_str,
- urllib.quote(fv),
+ quote(fv),
line.rstrip())
elif return_type == \
Query.RETURN_PACKAGES:
@@ -635,7 +634,7 @@
cat = self.repo.get_catalog(pub=self._get_req_pub())
except srepo.RepositoryError as e:
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
response = cherrypy.response
response.headers["Content-type"] = "text/plain; charset=utf-8"
@@ -672,7 +671,7 @@
try:
name = tokens[0]
except IndexError:
- raise cherrypy.HTTPError(httplib.FORBIDDEN,
+ raise cherrypy.HTTPError(http_client.FORBIDDEN,
_("Directory listing not allowed."))
try:
@@ -683,7 +682,7 @@
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
self.__set_response_expires("catalog", 86400, 86400)
return serve_file(fpath, "text/plain; charset=utf-8")
@@ -700,13 +699,13 @@
pubs = self.repo.publishers
except Exception as e:
cherrypy.log("Request failed: {0}".format(e))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
# A broken proxy (or client) has caused a fully-qualified FMRI
# to be split up.
comps = [t for t in tokens]
if not comps:
- raise cherrypy.HTTPError(httplib.FORBIDDEN,
+ raise cherrypy.HTTPError(http_client.FORBIDDEN,
_("Directory listing not allowed."))
if len(comps) > 1 and comps[0] == "pkg:" and comps[1] in pubs:
@@ -724,13 +723,13 @@
fpath = self.repo.manifest(pfmri,
pub=self._get_req_pub())
except (IndexError, fmri.FmriError) as e:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
except srepo.RepositoryError as e:
# Treat any remaining repository error as a 404, but
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
# Send manifest
self.__set_response_expires("manifest", 86400*365, 86400*365)
@@ -848,13 +847,13 @@
try:
fpath = self.repo.file(fhash, pub=self._get_req_pub())
except srepo.RepositoryFileNotFoundError as e:
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
except srepo.RepositoryError as e:
# Treat any remaining repository error as a 404, but
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
self.__set_response_expires("file", 86400*365, 86400*365)
return serve_file(fpath, "application/data")
@@ -870,7 +869,7 @@
return self.file_0(*tokens)
elif method in ("POST", "PUT"):
return self.__upload_file(*tokens)
- raise cherrypy.HTTPError(httplib.METHOD_NOT_ALLOWED,
+ raise cherrypy.HTTPError(http_client.METHOD_NOT_ALLOWED,
"{0} is not allowed".format(method))
# We need to prevent cherrypy from processing the request body so that
@@ -902,7 +901,7 @@
# XXX Authentication will be handled by virtue of possessing a
# signed certificate (or a more elaborate system).
if not pfmri:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
_("A valid package FMRI must be specified."))
try:
@@ -914,7 +913,7 @@
# that to mean that the server doesn't support this
# operation.
cherrypy.log("Request failed: {0}".format(e))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
if pfmri.publisher and not self._get_req_pub():
self.__map_pub_ops(pfmri.publisher)
@@ -942,7 +941,7 @@
# XXX Authentication will be handled by virtue of possessing a
# signed certificate (or a more elaborate system).
if not pfmri:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
_("A valid package FMRI must be specified."))
try:
@@ -954,7 +953,7 @@
# that to mean that the server doesn't support this
# operation.
cherrypy.log("Request failed: {0}".format(e))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
if pfmri.publisher and not self._get_req_pub():
self.__map_pub_ops(pfmri.publisher)
@@ -976,7 +975,7 @@
try:
# cherrypy decoded it, but we actually need it encoded.
- trans_id = urllib.quote(tokens[0], "")
+ trans_id = quote(tokens[0], "")
except IndexError:
trans_id = None
@@ -993,7 +992,7 @@
else:
add_to_catalog = False
except ValueError as e:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
"X-IPkg-Add-To-Catalog".format(e))
try:
@@ -1004,7 +1003,7 @@
# returned here as misc.versioned_urlopen will interpret
# that to mean that the server doesn't support this
# operation.
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
response = cherrypy.response
response.headers["Package-FMRI"] = pfmri
@@ -1075,11 +1074,11 @@
pub=self._get_req_pub(),
refresh_index=False)
else:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
"Unknown or unsupported operation: '{0}'".format(
cmd))
- except Queue.Full:
- raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
+ except queue.Full:
+ raise cherrypy.HTTPError(http_client.SERVICE_UNAVAILABLE,
"Another operation is already in progress; try "
"again later.")
@@ -1092,7 +1091,7 @@
try:
# cherrypy decoded it, but we actually need it encoded.
- trans_id = urllib.quote(tokens[0], "")
+ trans_id = quote(tokens[0], "")
except IndexError:
trans_id = None
@@ -1103,7 +1102,7 @@
# returned here as misc.versioned_urlopen will interpret
# that to mean that the server doesn't support this
# operation.
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
def add_0(self, *tokens):
"""Adds an action and its content to an in-flight transaction
@@ -1113,7 +1112,7 @@
try:
# cherrypy decoded it, but we actually need it encoded.
- trans_id = urllib.quote(tokens[0], "")
+ trans_id = quote(tokens[0], "")
except IndexError:
trans_id = None
@@ -1123,7 +1122,7 @@
entry_type = None
if entry_type not in actions.types:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, _("The "
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, _("The "
"specified Action Type, '{0}', is not valid.").format(
entry_type))
@@ -1146,7 +1145,7 @@
attrs[a] = val
except ValueError:
raise cherrypy.HTTPError(
- httplib.BAD_REQUEST, _("The "
+ http_client.BAD_REQUEST, _("The "
"specified Action attribute value, "
"'{0}', is not valid.").format(
attrs[a]))
@@ -1162,7 +1161,7 @@
action = actions.types[entry_type](data, **attrs)
except actions.ActionError as e:
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
# XXX Once actions are labelled with critical nature.
# if entry_type in critical_actions:
@@ -1176,7 +1175,7 @@
# that to mean that the server doesn't support this
# operation.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
# We need to prevent cherrypy from processing the request body so that
# add can parse the request body itself. In addition, we also need to
@@ -1200,7 +1199,7 @@
try:
# cherrypy decoded it, but we actually need it encoded.
- trans_id = urllib.quote(tokens[0], "")
+ trans_id = quote(tokens[0], "")
except IndexError:
raise
trans_id = None
@@ -1210,7 +1209,7 @@
size = int(request.headers.get("Content-Length", 0))
if size < 0:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
_("file/1 must be sent a file."))
data = request.rfile
@@ -1221,7 +1220,7 @@
# returned here as misc.versioned_urlopen will interpret
# that to mean that the server doesn't support this
# operation.
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
response.headers["Content-Length"] = "0"
return response.body
@@ -1253,9 +1252,9 @@
err = "Unknown index subcommand: {0}".format(
cmd)
cherrypy.log(err)
- raise cherrypy.HTTPError(httplib.NOT_FOUND, err)
- except Queue.Full:
- raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, err)
+ except queue.Full:
+ raise cherrypy.HTTPError(http_client.SERVICE_UNAVAILABLE,
"Another operation is already in progress; try "
"again later.")
@@ -1273,7 +1272,7 @@
pubs = self.repo.publishers
except Exception as e:
cherrypy.log("Request failed: {0}".format(e))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
# A broken proxy (or client) has caused a fully-qualified FMRI
# to be split up.
@@ -1299,16 +1298,16 @@
pfmri.publisher = pub
fpath = self.repo.manifest(pfmri, pub=pub)
except (IndexError, fmri.FmriError) as e:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
except srepo.RepositoryError as e:
# Treat any remaining repository error as a 404, but
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
if not os.path.exists(fpath):
- raise cherrypy.HTTPError(httplib.NOT_FOUND)
+ raise cherrypy.HTTPError(http_client.NOT_FOUND)
m = manifest.Manifest(pfmri)
m.set_content(pathname=fpath)
@@ -1326,7 +1325,7 @@
# Skip the license.
continue
- with file(lpath, "rb") as lfile:
+ with open(lpath, "rb") as lfile:
misc.gunzip_from_stream(lfile, lsummary,
ignore_hash=True)
lsummary.seek(0)
@@ -1372,7 +1371,7 @@
# Publisher specified in request is unknown.
e = srepo.RepositoryUnknownPublisher(prefix)
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
buf = cStringIO.StringIO()
try:
@@ -1381,7 +1380,7 @@
# Treat any remaining error as a 404, but log it and
# include the real failure information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
buf.seek(0)
self.__set_response_expires("publisher", 86400*365, 86400*365)
return buf.getvalue()
@@ -1407,7 +1406,7 @@
# treat it as an unsupported operation.
cherrypy.log("Request failed: {0}".format(
str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND,
+ raise cherrypy.HTTPError(http_client.NOT_FOUND,
str(e))
buf = cStringIO.StringIO()
@@ -1417,7 +1416,7 @@
# Treat any remaining error as a 404, but log it and
# include the real failure information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
buf.seek(0)
self.__set_response_expires("publisher", 86400*365, 86400*365)
return buf.getvalue()
@@ -1444,7 +1443,7 @@
# If this fails, it's ok to raise an exception since bad
# input was likely provided.
cherrypy.log("Request failed: {0}".format(e))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
if not matches:
return ""
@@ -1480,7 +1479,7 @@
pubs = self.repo.publishers
except Exception as e:
cherrypy.log("Request failed: {0}".format(e))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
# A broken proxy (or client) has caused a fully-qualified FMRI
# to be split up.
@@ -1496,7 +1495,7 @@
# proxy behaviour.
pfmri = "/".join(comps)
except IndexError:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST)
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST)
# XXX This is a hack to deal with the fact that packagemanager
# brokenly expects all p5i URIs or files to have a .p5i
@@ -1517,7 +1516,7 @@
output += self.__get_matching_p5i_data(rstore, pfmri)
if output == "":
- raise cherrypy.HTTPError(httplib.NOT_FOUND, _("No "
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, _("No "
"matching package found in repository."))
self.__set_response_expires("p5i", 86400*365, 86400*365)
@@ -1537,7 +1536,7 @@
out = json.dumps(dump_struct, ensure_ascii=False,
indent=2, sort_keys=True)
except Exception as e:
- raise cherrypy.HTTPError(httplib.NOT_FOUND, _("Unable "
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, _("Unable "
"to generate statistics."))
return out + "\n"
@@ -1636,16 +1635,16 @@
# are weighted by how often we want them to happen; the loop
# below then puts them into a pick-list.
errors = {
- httplib.REQUEST_TIMEOUT: 10,
- httplib.BAD_GATEWAY: 10,
- httplib.GATEWAY_TIMEOUT: 10,
- httplib.FORBIDDEN: 2,
- httplib.NOT_FOUND: 2,
- httplib.BAD_REQUEST: 2
+ http_client.REQUEST_TIMEOUT: 10,
+ http_client.BAD_GATEWAY: 10,
+ http_client.GATEWAY_TIMEOUT: 10,
+ http_client.FORBIDDEN: 2,
+ http_client.NOT_FOUND: 2,
+ http_client.BAD_REQUEST: 2
}
self.errlist = []
- for x, n in errors.iteritems():
+ for x, n in six.iteritems(errors):
for i in range(0, n):
self.errlist.append(x)
cherrypy.log("NASTY Depot Error List: {0}".format(
@@ -1720,9 +1719,9 @@
try:
nasty_level = int(tokens[0])
except (IndexError, ValueError):
- raise cherrypy.HTTPError(httplib.BAD_REQUEST)
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST)
if nasty_level < 0 or nasty_level > 100:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST)
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST)
cherrypy.log("Nastiness set to {0:d} by client request".format(
nasty_level))
self.nasty_level = nasty_level
@@ -1741,7 +1740,7 @@
if self.need_nasty_3():
cherrypy.log("NASTY versions_0: X-Ipkg-Error")
response = cherrypy.response
- response.status = httplib.UNAUTHORIZED
+ response.status = http_client.UNAUTHORIZED
response.headers["X-Ipkg-Error"] = random.choice(["ENT",
"LIC", "SVR", "MNT", "YYZ", ""])
return ""
@@ -1760,7 +1759,7 @@
if self.need_nasty_2():
cherrypy.log("NASTY versions_0: modified version #s")
versions = "pkg-server {0}-nasty\n".format(pkg.VERSION)
- for op, vers in self.vops.iteritems():
+ for op, vers in six.iteritems(self.vops):
versions += op + " "
verlen = len(vers)
for v in vers:
@@ -1791,7 +1790,7 @@
versions = "pkg-server {0}\n".format(pkg.VERSION)
versions += "\n".join(
"{0} {1}".format(op, " ".join(str(v) for v in vers))
- for op, vers in self.vops.iteritems()
+ for op, vers in six.iteritems(self.vops)
) + "\n"
return versions
@@ -1809,7 +1808,7 @@
# are toxic to clients who are facing a nasty antagonist--
# the client has no way to verify that the content, and
# things go badly off the rails.
- raise cherrypy.HTTPError(httplib.BAD_REQUEST)
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST)
catalog_0._cp_config = {
"response.stream": True,
@@ -1833,13 +1832,13 @@
pubs = self.repo.publishers
except Exception as e:
cherrypy.log("Request failed: {0}".format(e))
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
# A broken proxy (or client) has caused a fully-qualified FMRI
# to be split up.
comps = [t for t in tokens]
if not comps:
- raise cherrypy.HTTPError(httplib.FORBIDDEN,
+ raise cherrypy.HTTPError(http_client.FORBIDDEN,
_("Directory listing not allowed."))
if len(comps) > 1 and comps[0] == "pkg:" and comps[1] in pubs:
@@ -1857,13 +1856,13 @@
fpath = self.repo.manifest(pfmri,
pub=self._get_req_pub())
except (IndexError, fmri.FmriError) as e:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, str(e))
except srepo.RepositoryError as e:
# Treat any remaining repository error as a 404, but
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
# NASTY
# Stash manifest entry for later use.
@@ -2035,13 +2034,13 @@
try:
fpath = self.repo.file(fhash, pub=self._get_req_pub())
except srepo.RepositoryFileNotFoundError as e:
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
except srepo.RepositoryError as e:
# Treat any remaining repository error as a 404, but
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
# NASTY
# Stash filename for later use.
@@ -2055,7 +2054,7 @@
if self.need_nasty_4():
# Forget that the file is here
cherrypy.log("NASTY file_0: 404 NOT_FOUND")
- raise cherrypy.HTTPError(httplib.NOT_FOUND)
+ raise cherrypy.HTTPError(http_client.NOT_FOUND)
# NASTY
# Send the wrong file
@@ -2082,7 +2081,7 @@
try:
name = tokens[0]
except IndexError:
- raise cherrypy.HTTPError(httplib.FORBIDDEN,
+ raise cherrypy.HTTPError(http_client.FORBIDDEN,
_("Directory listing not allowed."))
try:
@@ -2093,7 +2092,7 @@
# log the error and include the real failure
# information.
cherrypy.log("Request failed: {0}".format(str(e)))
- raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, str(e))
# NASTY
# Stash catalog entry for later use.
@@ -2121,8 +2120,8 @@
def search_1(self, *args, **params):
# Raise assorted errors; if not, call superclass search_1.
if self.need_nasty():
- errs = [httplib.NOT_FOUND, httplib.BAD_REQUEST,
- httplib.SERVICE_UNAVAILABLE]
+ errs = [http_client.NOT_FOUND, http_client.BAD_REQUEST,
+ http_client.SERVICE_UNAVAILABLE]
code = random.choice(errs)
cherrypy.log("NASTY search_1: HTTP {0:d}".format(code))
raise cherrypy.HTTPError(code)
@@ -2142,7 +2141,7 @@
filesz = fst.st_size
nfile = open(filepath, "rb")
except EnvironmentError:
- raise cherrypy.HTTPError(httplib.NOT_FOUND)
+ raise cherrypy.HTTPError(http_client.NOT_FOUND)
# NASTY
# Send incorrect content length
@@ -2230,7 +2229,7 @@
proto = "http"
netloc = "{0}:{1}".format(socket.getfqdn(), self.port)
- self.url = urlparse.urlunsplit((proto, netloc, '', '', ''))
+ self.url = urlunsplit((proto, netloc, '', '', ''))
def reg_cb(self, sd_hdl, flags, error_code, name, regtype, domain):
"""Callback invoked by service register function. Arguments
@@ -2304,7 +2303,7 @@
def __init__(self, bus):
# Setup the background task queue.
SimplePlugin.__init__(self, bus)
- self.__q = Queue.Queue(10)
+ self.__q = queue.Queue(10)
self.__thread = None
def put(self, task, *args, **kwargs):
@@ -2312,7 +2311,7 @@
isn't full.
"""
if self.__q.unfinished_tasks > 9:
- raise Queue.Full()
+ raise queue.Full()
self.__q.put_nowait((task, args, kwargs))
def run(self):
@@ -2326,7 +2325,7 @@
# for a new task to appear.
task, args, kwargs = \
self.__q.get(timeout=.5)
- except Queue.Empty:
+ except queue.Empty:
continue
task(*args, **kwargs)
if hasattr(self.__q, "task_done"):
--- a/src/modules/server/face.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/server/face.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,13 +29,16 @@
from __future__ import print_function
import cherrypy
import cherrypy.lib.static
-import httplib
import os
+import sys
+
+from six.moves import http_client
+from six.moves.urllib.parse import unquote
+
import pkg.server.api as api
import pkg.server.api_errors as sae
import pkg.server.feed
-import sys
-import urllib
+
try:
import mako.exceptions
import mako.lookup
@@ -55,10 +58,10 @@
def feed(depot, request, response, pub):
if depot.repo.mirror:
- raise cherrypy.HTTPError(httplib.NOT_FOUND,
+ raise cherrypy.HTTPError(http_client.NOT_FOUND,
"Operation not supported in current server mode.")
if not depot.repo.get_catalog(pub).updates:
- raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
+ raise cherrypy.HTTPError(http_client.SERVICE_UNAVAILABLE,
"No update history; unable to generate feed.")
return pkg.server.feed.handle(depot, request, response, pub)
@@ -72,7 +75,7 @@
# All errors are treated as a 404 since reverse proxies such as Apache
# don't handle 500 errors in a desirable way. For any error but a 404,
# an error is logged.
- if error != httplib.NOT_FOUND:
+ if error != http_client.NOT_FOUND:
cherrypy.log("Error encountered while processing "
"template: {0}\n".format(path), traceback=True)
@@ -110,12 +113,12 @@
return feed(depot, request, response, pub)
if not path.endswith(".shtml"):
- spath = urllib.unquote(path)
+ spath = unquote(path)
fname = os.path.join(depot.web_root, spath)
if not os.path.normpath(fname).startswith(
os.path.normpath(depot.web_root)):
# Ignore requests for files outside of the web root.
- return __handle_error(path, httplib.NOT_FOUND)
+ return __handle_error(path, http_client.NOT_FOUND)
else:
return cherrypy.lib.static.serve_file(os.path.join(
depot.web_root, spath))
@@ -133,15 +136,15 @@
error=str(e)))
cherrypy.log("Ensure that the correct --content-root has been "
"provided to pkg.depotd.")
- return __handle_error(request.path_info, httplib.NOT_FOUND)
+ return __handle_error(request.path_info, http_client.NOT_FOUND)
except IOError as e:
- return __handle_error(path, httplib.INTERNAL_SERVER_ERROR)
+ return __handle_error(path, http_client.INTERNAL_SERVER_ERROR)
except mako.exceptions.TemplateLookupException as e:
# The above exception indicates that mako could not locate the
# template (in most cases, Mako doesn't seem to always clearly
# differentiate).
- return __handle_error(path, httplib.NOT_FOUND)
+ return __handle_error(path, http_client.NOT_FOUND)
except sae.RedirectException as e:
raise cherrypy.HTTPRedirect(e.data)
except:
- return __handle_error(path, httplib.INTERNAL_SERVER_ERROR)
+ return __handle_error(path, http_client.INTERNAL_SERVER_ERROR)
--- a/src/modules/server/feed.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/server/feed.py Wed Jul 01 16:20:01 2015 -0700
@@ -31,20 +31,22 @@
a given time period."""
import cherrypy
-from cherrypy.lib.static import serve_file
import copy
import datetime
-import httplib
import os
import shutil
+import six
import time
-import urllib
-import urlparse
import xml.dom.minidom as xmini
+from cherrypy.lib.static import serve_file
+from six.moves import http_client
+from six.moves.urllib.parse import quote, unquote, urlparse
+
import pkg.catalog as catalog
import pkg.misc as misc
+
MIME_TYPE = "application/atom+xml"
CACHE_FILENAME = "feed.xml"
RFC3339_FMT = "%Y-%m-%dT%H:%M:%SZ"
@@ -67,7 +69,7 @@
"""
return "tag:{0},{1}:{2}".format(f.publisher,
f.get_timestamp().strftime("%Y-%m-%d"),
- urllib.unquote(f.get_url_path()))
+ unquote(f.get_url_path()))
def init(depot):
"""This function performs general initialization work that is needed
@@ -99,10 +101,10 @@
# identifier.
i = doc.createElement("id")
it = xmini.Text()
- netloc, path = urlparse.urlparse(cherrypy.url())[1:3]
+ netloc, path = urlparse(cherrypy.url())[1:3]
netloc = netloc.split(":", 1)[0]
tag = "tag:{0},{1}:{2}".format(netloc, update_ts.strftime("%Y-%m-%d"),
- path)
+ path)
it.replaceWholeText(tag)
i.appendChild(it)
feed.appendChild(i)
@@ -142,7 +144,7 @@
e = doc.createElement("entry")
pfmri, op_type, op_time, metadata = entry
-
+
# Generate a 'tag' uri, to uniquely identify the entry, using the fmri.
i = xmini.Text()
i.replaceWholeText(fmri_to_taguri(pfmri))
@@ -186,7 +188,7 @@
# Link to the info output for the given package FMRI.
e_uri = misc.get_rel_path(request,
- "info/0/{0}".format(urllib.quote(str(pfmri))))
+ "info/0/{0}".format(quote(str(pfmri))))
l = doc.createElement("link")
l.setAttribute("rel", "alternate")
@@ -216,7 +218,7 @@
return []
updates = set()
- for name, mdata in c.updates.iteritems():
+ for name, mdata in six.iteritems(c.updates):
# The last component of the update name is the locale.
locale = name.split(".", 2)[2]
@@ -233,7 +235,7 @@
continue
updates.add(name)
- if not updates:
+ if not updates:
# No updates needed.
return []
@@ -316,7 +318,7 @@
os.remove(pathname)
except IOError:
raise cherrypy.HTTPError(
- httplib.INTERNAL_SERVER_ERROR,
+ http_client.INTERNAL_SERVER_ERROR,
"Unable to clear feed cache.")
def __cache_needs_update(depot, pub):
@@ -390,7 +392,7 @@
# Generate and cache the feed.
misc.makedirs(os.path.dirname(cfpath))
- cf = file(cfpath, "w")
+ cf = open(cfpath, "w")
update(request, depot, last, cf, pub)
cf.close()
--- a/src/modules/server/repository.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/server/repository.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,14 +29,15 @@
import os
import os.path
import shutil
+import six
import stat
import sys
import tempfile
-import urllib
import zlib
-
import M2Crypto as m2
+from six.moves.urllib.parse import unquote
+
import pkg.actions as actions
import pkg.catalog as catalog
import pkg.client.api_errors as apx
@@ -62,6 +63,8 @@
import pkg.pkgsubprocess as subprocess
import pkg.version
+from pkg.pkggzip import PkgGzipFile
+
CURRENT_REPO_VERSION = 4
REPO_QUARANTINE_DIR = "pkg5-quarantine"
@@ -85,7 +88,6 @@
VERIFY_DEPENDENCY,
])
-from pkg.pkggzip import PkgGzipFile
class RepositoryError(Exception):
"""Base exception class for all Repository exceptions."""
@@ -95,12 +97,6 @@
if args:
self.data = args[0]
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
def __str__(self):
return str(self.data)
@@ -542,8 +538,8 @@
directory and the name of the manifest file, and returns an FMRI
constructed from the information in those components."""
- v = pkg.version.Version(urllib.unquote(ver), None)
- f = fmri.PkgFmri(urllib.unquote(os.path.basename(pkgpath)))
+ v = pkg.version.Version(unquote(ver), None)
+ f = fmri.PkgFmri(unquote(os.path.basename(pkgpath)))
f.version = v
return f
@@ -1689,7 +1685,7 @@
for name in slist:
# Stem must be decoded before use.
try:
- pname = urllib.unquote(name)
+ pname = unquote(name)
except Exception:
# Assume error is result of
# unexpected file in directory;
@@ -1709,7 +1705,7 @@
# Version must be decoded before
# use.
- pver = urllib.unquote(ver)
+ pver = unquote(ver)
try:
pfmri = fmri.PkgFmri(
"@".join((pname,
@@ -2305,7 +2301,7 @@
# Stem must be decoded before use.
try:
- pname = urllib.unquote(name)
+ pname = unquote(name)
except Exception:
# Assume error is result of an
# unexpected file in the directory. We
@@ -2321,7 +2317,7 @@
path = os.path.join(pdir, ver)
# Version must be decoded before
# use.
- pver = urllib.unquote(ver)
+ pver = unquote(ver)
try:
pfmri = fmri.PkgFmri("@".join((pname,
pver)),
@@ -3056,7 +3052,7 @@
finally:
# This ensures that the original exception and
# traceback are used.
- raise exc_value, None, exc_tb
+ six.reraise(type(exc_value), exc_value, exc_tb)
def remove_publisher(self, pfxs, repo_path, synch=False):
"""Removes a repository storage area and configuration
@@ -3491,7 +3487,7 @@
"""
def merge(src, dest):
- for k, v in src.iteritems():
+ for k, v in six.iteritems(src):
if k in dest:
dest[k].extend(v)
else:
@@ -4346,7 +4342,7 @@
raised. Other errors can raise exceptions of class ApiException.
"""
- if isinstance(repo_uri, basestring):
+ if isinstance(repo_uri, six.string_types):
repo_uri = publisher.RepositoryURI(misc.parse_uri(repo_uri))
path = repo_uri.get_pathname()
@@ -4384,7 +4380,7 @@
# ...and this file (which can be empty).
try:
- with file(os.path.join(path, "cfg_cache"), "wb") as cf:
+ with open(os.path.join(path, "cfg_cache"), "w") as cf:
cf.write("\n")
except EnvironmentError as e:
if e.errno == errno.EACCES:
--- a/src/modules/server/transaction.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/server/transaction.py Wed Jul 01 16:20:01 2015 -0700
@@ -31,8 +31,9 @@
import os
import re
import shutil
+import six
import time
-import urllib
+from six.moves.urllib.parse import quote, unquote
import pkg.actions as actions
import pkg.digest as digest
@@ -57,12 +58,6 @@
else:
self.data = None
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
def __str__(self):
return str(self.data)
@@ -159,7 +154,7 @@
# XXX should the timestamp be in ISO format?
return "{0:d}_{1}".format(
calendar.timegm(self.open_time.utctimetuple()),
- urllib.quote(str(self.fmri), ""))
+ quote(str(self.fmri), ""))
def open(self, rstore, client_release, pfmri):
# Store a reference to the repository storage object.
@@ -171,12 +166,12 @@
if pfmri is None:
raise TransactionOperationError(pfmri=None)
- if not isinstance(pfmri, basestring):
+ if not isinstance(pfmri, six.string_types):
pfmri = str(pfmri)
self.client_release = client_release
self.pkg_name = pfmri
- self.esc_pkg_name = urllib.quote(pfmri, "")
+ self.esc_pkg_name = quote(pfmri, "")
# attempt to construct an FMRI object
try:
@@ -207,7 +202,7 @@
else:
pkg_name = pkg_name.replace("pkg:/", pub_string)
self.pkg_name = pkg_name
- self.esc_pkg_name = urllib.quote(pkg_name, "")
+ self.esc_pkg_name = quote(pkg_name, "")
# record transaction metadata: opening_time, package, user
# XXX publishing with a custom timestamp may require
@@ -217,7 +212,7 @@
# Strip the timestamp information for consistency with
# the case where it was not specified.
self.pkg_name = ":".join(pfmri.split(":")[:-1])
- self.esc_pkg_name = urllib.quote(self.pkg_name, "")
+ self.esc_pkg_name = quote(self.pkg_name, "")
else:
# A timestamp was not provided; try to generate a
# unique one.
@@ -251,7 +246,7 @@
# always create a minimal manifest
#
tfpath = os.path.join(self.dir, "manifest")
- tfile = file(tfpath, "ab+")
+ tfile = open(tfpath, "a+")
# Build a set action containing the fully qualified FMRI and add
# it to the manifest. While it may seem inefficient to create
@@ -285,12 +280,12 @@
if pfmri is None:
raise TransactionOperationError(pfmri=None)
- if not isinstance(pfmri, basestring):
+ if not isinstance(pfmri, six.string_types):
pfmri = str(pfmri)
self.client_release = client_release
self.pkg_name = pfmri
- self.esc_pkg_name = urllib.quote(pfmri, "")
+ self.esc_pkg_name = quote(pfmri, "")
# attempt to construct an FMRI object
try:
@@ -321,7 +316,7 @@
else:
pkg_name = pkg_name.replace("pkg:/", pub_string)
self.pkg_name = pkg_name
- self.esc_pkg_name = urllib.quote(pkg_name, "")
+ self.esc_pkg_name = quote(pkg_name, "")
# record transaction metadata: opening_time, package, user
self.open_time = self.fmri.get_timestamp()
@@ -329,7 +324,7 @@
# Strip the timestamp information for consistency with
# the case where it was not specified.
self.pkg_name = ":".join(pfmri.split(":")[:-1])
- self.esc_pkg_name = urllib.quote(self.pkg_name, "")
+ self.esc_pkg_name = quote(self.pkg_name, "")
if not rstore.valid_append_fmri(self.fmri):
raise TransactionOperationError(missing_fmri=True,
@@ -369,7 +364,7 @@
self.open_time = \
datetime.datetime.utcfromtimestamp(int(open_time_str))
- self.pkg_name = urllib.unquote(self.esc_pkg_name)
+ self.pkg_name = unquote(self.esc_pkg_name)
# This conversion should always work, because we encoded the
# client release on the initial open of the transaction.
@@ -390,7 +385,7 @@
# Find out if the package is renamed or obsolete.
try:
tfpath = os.path.join(self.dir, "manifest")
- tfile = file(tfpath, tmode)
+ tfile = open(tfpath, tmode)
except IOError as e:
if e.errno == errno.ENOENT:
return
@@ -416,7 +411,7 @@
"""
def split_trans_id(tid):
m = re.match("(\d+)_(.*)", tid)
- return m.group(1), urllib.unquote(m.group(2))
+ return m.group(1), unquote(m.group(2))
trans_id = self.get_basename()
pkg_fmri = split_trans_id(trans_id)[1]
@@ -607,7 +602,7 @@
# Now that the action is known to be sane, we can add it to the
# manifest.
tfpath = os.path.join(self.dir, "manifest")
- tfile = file(tfpath, "ab+")
+ tfile = open(tfpath, "a+")
print(action, file=tfile)
tfile.close()
--- a/src/modules/smf.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/smf.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,11 +27,13 @@
# This module provides a basic interface to smf.
import os
+import six
import pkg.pkgsubprocess as subprocess
from pkg.client import global_settings
from pkg.client.debugvalues import DebugValues
+from six.moves.urllib.parse import urlparse
logger = global_settings.logger
@@ -62,12 +64,6 @@
self.return_code = return_code
self.output = output
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
-
def __str__(self):
return "Cmd {0} exited with status {1:d}, and output '{2}'".format(
self.cmd, self.return_code, self.output)
@@ -129,7 +125,7 @@
from the set that is returned and an error message is logged.
"""
- if isinstance(fmris, basestring):
+ if isinstance(fmris, six.string_types):
fmris = set([fmris])
chars = "*?[!^"
for fmri in fmris.copy():
@@ -191,7 +187,7 @@
def enable(fmris, temporary=False, sync_timeout=0, zone=None):
if not fmris:
return
- if isinstance(fmris, basestring):
+ if isinstance(fmris, six.string_types):
fmris = (fmris,)
args = [svcadm_path, "enable"]
@@ -207,7 +203,7 @@
def disable(fmris, temporary=False, sync_timeout=0, zone=None):
if not fmris:
return
- if isinstance(fmris, basestring):
+ if isinstance(fmris, six.string_types):
fmris = (fmris,)
args = [svcadm_path, "disable", "-s"]
if sync_timeout > 0:
@@ -220,7 +216,7 @@
def mark(state, fmris, zone=None):
if not fmris:
return
- if isinstance(fmris, basestring):
+ if isinstance(fmris, six.string_types):
fmris = (fmris,)
args = [svcadm_path, "mark", state]
# fmris could be a list so explicit cast is necessary
@@ -229,7 +225,7 @@
def refresh(fmris, sync_timeout=0, zone=None):
if not fmris:
return
- if isinstance(fmris, basestring):
+ if isinstance(fmris, six.string_types):
fmris = (fmris,)
args = [svcadm_path, "refresh"]
if sync_timeout:
@@ -242,7 +238,7 @@
def restart(fmris, sync_timeout=0, zone=None):
if not fmris:
return
- if isinstance(fmris, basestring):
+ if isinstance(fmris, six.string_types):
fmris = (fmris,)
args = [svcadm_path, "restart"]
if sync_timeout:
--- a/src/modules/sysvpkg.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/sysvpkg.py Wed Jul 01 16:20:01 2015 -0700
@@ -200,7 +200,7 @@
return []
try:
- fp = file(self.pkgpath + "/install/depend")
+ fp = open(self.pkgpath + "/install/depend")
except IOError as xxx_todo_changeme:
# Missing depend file is just fine
(err, msg) = xxx_todo_changeme.args
@@ -232,7 +232,7 @@
if self.datastream:
fp = self._pkginfo
else:
- fp = file(self.pkgpath + "/pkginfo")
+ fp = open(self.pkgpath + "/pkginfo")
for line in fp:
line = line.lstrip().rstrip('\n')
@@ -267,7 +267,7 @@
if self.datastream:
fp = self._pkgmap
else:
- fp = file(self.pkgpath + "/pkgmap")
+ fp = open(self.pkgpath + "/pkgmap")
for line in fp:
line = line.rstrip('\n')
--- a/src/modules/updatelog.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/updatelog.py Wed Jul 01 16:20:01 2015 -0700
@@ -146,17 +146,17 @@
# Verify that they aren't already in the catalog
catpath = os.path.normpath(os.path.join(path, "catalog"))
-
+
tmp_num, tmpfile = tempfile.mkstemp(dir=path)
tfile = os.fdopen(tmp_num, 'w')
try:
- pfile = file(catpath, "rb")
+ pfile = open(catpath, "rb")
except IOError as e:
if e.errno == errno.ENOENT:
# Creating an empty file
- file(catpath, "wb").close()
- pfile = file(catpath, "rb")
+ open(catpath, "wb").close()
+ pfile = open(catpath, "rb")
else:
tfile.close()
portable.remove(tmpfile)
@@ -187,7 +187,7 @@
portable.rename(tmpfile, catpath)
# Now re-write npkgs and Last-Modified in attributes file
- afile = file(os.path.normpath(os.path.join(path, "attrs")),
+ afile = open(os.path.normpath(os.path.join(path, "attrs")),
"r")
attrre = re.compile('^S ([^:]*): (.*)')
--- a/src/modules/version.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/modules/version.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,7 +29,7 @@
import time
import weakref
-from itertools import izip
+from six.moves import zip
CONSTRAINT_NONE = 0
CONSTRAINT_AUTO = 50
@@ -91,8 +91,8 @@
try:
list.__init__(self,
- map(DotSequence.dotsequence_val,
- dotstring.split(".")))
+ list(map(DotSequence.dotsequence_val,
+ dotstring.split("."))))
except ValueError:
raise IllegalDotSequence(dotstring)
@@ -113,7 +113,7 @@
if len(self) > len(other):
return False
- for a, b in izip(self, other):
+ for a, b in zip(self, other):
if a != b:
return False
return True
@@ -160,8 +160,8 @@
def __init__(self, dotstring):
try:
list.__init__(self,
- map(self.dotsequence_val,
- dotstring.split(".")))
+ list(map(self.dotsequence_val,
+ dotstring.split("."))))
except ValueError:
raise IllegalDotSequence(dotstring)
@@ -219,7 +219,7 @@
if len(self) > len(other):
return False
- for a, b in izip(self, other):
+ for a, b in zip(self, other):
if a != b:
return False
return True
--- a/src/pkg/external_deps.txt Tue Jun 30 11:44:33 2015 -0700
+++ b/src/pkg/external_deps.txt Wed Jul 01 16:20:01 2015 -0700
@@ -22,6 +22,7 @@
pkg:/library/python/pycurl-27
pkg:/library/python/pyopenssl-27
pkg:/library/python/simplejson-27
+ pkg:/library/python/six-27
pkg:/package/svr4
pkg:/runtime/python-27
pkg:/runtime/python-34
--- a/src/pkg/manifests/package:pkg.p5m Tue Jun 30 11:44:33 2015 -0700
+++ b/src/pkg/manifests/package:pkg.p5m Wed Jul 01 16:20:01 2015 -0700
@@ -64,7 +64,11 @@
file path=$(PYDIRVP)/pkg/actions/group.py
file path=$(PYDIRVP)/pkg/actions/hardlink.py
file path=$(PYDIRVP)/pkg/actions/legacy.py
-file path=$(PYDIRVP)/pkg/actions/license.py
+#
+# Don't worry about the "from x.y.z" six imports since pkgdepend has some issues
+# with the python importer. Instead, we force a dependency on the six package.
+#
+file path=$(PYDIRVP)/pkg/actions/license.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/actions/link.py
file path=$(PYDIRVP)/pkg/actions/signature.py
file path=$(PYDIRVP)/pkg/actions/unknown.py
@@ -84,25 +88,27 @@
dir path=$(PYDIRVP)/pkg/client
file path=$(PYDIRVP)/pkg/client/__init__.py
file path=$(PYDIRVP)/pkg/client/actuator.py
-file path=$(PYDIRVP)/pkg/client/api.py
-file path=$(PYDIRVP)/pkg/client/api_errors.py
+file path=$(PYDIRVP)/pkg/client/api.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/client/api_errors.py pkg.depend.bypass-generate=.*six.*
#
# Don't worry about the libbe import; the python code looks for it in case
# it can't import libbe_py, and is graceful in the face of its absence.
#
file path=$(PYDIRVP)/pkg/client/bootenv.py pkg.depend.bypass-generate=.*libbe.*
-file path=$(PYDIRVP)/pkg/client/client_api.py
+file path=$(PYDIRVP)/pkg/client/client_api.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/client/debugvalues.py
file path=$(PYDIRVP)/pkg/client/firmware.py
file path=$(PYDIRVP)/pkg/client/history.py
-file path=$(PYDIRVP)/pkg/client/image.py
-file path=$(PYDIRVP)/pkg/client/imageconfig.py
+file path=$(PYDIRVP)/pkg/client/image.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/client/imageconfig.py \
+ pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/client/imageplan.py
file path=$(PYDIRVP)/pkg/client/imagetypes.py
file path=$(PYDIRVP)/pkg/client/indexer.py
dir path=$(PYDIRVP)/pkg/client/linkedimage
file path=$(PYDIRVP)/pkg/client/linkedimage/__init__.py
-file path=$(PYDIRVP)/pkg/client/linkedimage/common.py
+file path=$(PYDIRVP)/pkg/client/linkedimage/common.py \
+ pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/client/linkedimage/system.py
file path=$(PYDIRVP)/pkg/client/linkedimage/zone.py
file path=$(PYDIRVP)/pkg/client/options.py
@@ -112,24 +118,30 @@
file path=$(PYDIRVP)/pkg/client/pkgremote.py
file path=$(PYDIRVP)/pkg/client/plandesc.py
file path=$(PYDIRVP)/pkg/client/printengine.py
-file path=$(PYDIRVP)/pkg/client/progress.py
-file path=$(PYDIRVP)/pkg/client/publisher.py
+file path=$(PYDIRVP)/pkg/client/progress.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/client/publisher.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/client/query_parser.py
file path=$(PYDIRVP)/pkg/client/rad_pkg.py
file path=$(PYDIRVP)/pkg/client/sigpolicy.py
dir path=$(PYDIRVP)/pkg/client/transport
file path=$(PYDIRVP)/pkg/client/transport/__init__.py
-file path=$(PYDIRVP)/pkg/client/transport/engine.py
-file path=$(PYDIRVP)/pkg/client/transport/exception.py
+file path=$(PYDIRVP)/pkg/client/transport/engine.py \
+ pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/client/transport/exception.py \
+ pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/client/transport/fileobj.py
-file path=$(PYDIRVP)/pkg/client/transport/mdetect.py
-file path=$(PYDIRVP)/pkg/client/transport/repo.py
-file path=$(PYDIRVP)/pkg/client/transport/stats.py
-file path=$(PYDIRVP)/pkg/client/transport/transport.py
-file path=$(PYDIRVP)/pkg/config.py
-file path=$(PYDIRVP)/pkg/cpiofile.py
+file path=$(PYDIRVP)/pkg/client/transport/mdetect.py \
+ pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/client/transport/repo.py \
+ pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/client/transport/stats.py \
+ pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/client/transport/transport.py \
+ pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/config.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/cpiofile.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/dependency.py
-file path=$(PYDIRVP)/pkg/depotcontroller.py
+file path=$(PYDIRVP)/pkg/depotcontroller.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/digest.py
file path=$(PYDIRVP)/pkg/elf.so
file path=$(PYDIRVP)/pkg/facet.py
@@ -148,26 +160,27 @@
file path=$(PYDIRVP)/pkg/flavor/python.py
file path=$(PYDIRVP)/pkg/flavor/script.py
file path=$(PYDIRVP)/pkg/flavor/smf_manifest.py
-file path=$(PYDIRVP)/pkg/fmri.py
-file path=$(PYDIRVP)/pkg/indexer.py
+file path=$(PYDIRVP)/pkg/fmri.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/indexer.py pkg.depend.bypass-generate=.*six.*
dir path=$(PYDIRVP)/pkg/lint
file path=$(PYDIRVP)/pkg/lint/__init__.py
-file path=$(PYDIRVP)/pkg/lint/base.py
-file path=$(PYDIRVP)/pkg/lint/config.py
-file path=$(PYDIRVP)/pkg/lint/engine.py
+file path=$(PYDIRVP)/pkg/lint/base.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/lint/config.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/lint/engine.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/lint/log.py
file path=$(PYDIRVP)/pkg/lint/opensolaris.py
file path=$(PYDIRVP)/pkg/lint/pkglint_action.py
-file path=$(PYDIRVP)/pkg/lint/pkglint_manifest.py
+file path=$(PYDIRVP)/pkg/lint/pkglint_manifest.py \
+ pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/lockfile.py
-file path=$(PYDIRVP)/pkg/manifest.py
+file path=$(PYDIRVP)/pkg/manifest.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/mediator.py
-file path=$(PYDIRVP)/pkg/misc.py
+file path=$(PYDIRVP)/pkg/misc.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/nrlock.py
-file path=$(PYDIRVP)/pkg/p5i.py
-file path=$(PYDIRVP)/pkg/p5p.py
-file path=$(PYDIRVP)/pkg/p5s.py
-file path=$(PYDIRVP)/pkg/pipeutils.py
+file path=$(PYDIRVP)/pkg/p5i.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/p5p.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/p5s.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/pipeutils.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/pkggzip.py
file path=$(PYDIRVP)/pkg/pkgsubprocess.py
file path=$(PYDIRVP)/pkg/pkgtarfile.py
@@ -186,32 +199,35 @@
file path=$(PYDIRVP)/pkg/pspawn.so
dir path=$(PYDIRVP)/pkg/publish
file path=$(PYDIRVP)/pkg/publish/__init__.py
-file path=$(PYDIRVP)/pkg/publish/dependencies.py
-file path=$(PYDIRVP)/pkg/publish/transaction.py
+file path=$(PYDIRVP)/pkg/publish/dependencies.py \
+ pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/publish/transaction.py \
+ pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/query_parser.py
file path=$(PYDIRVP)/pkg/search_errors.py
-file path=$(PYDIRVP)/pkg/search_storage.py
+file path=$(PYDIRVP)/pkg/search_storage.py pkg.depend.bypass-generate=.*six.*
dir path=$(PYDIRVP)/pkg/server
file path=$(PYDIRVP)/pkg/server/__init__.py
file path=$(PYDIRVP)/pkg/server/api.py
file path=$(PYDIRVP)/pkg/server/api_errors.py
file path=$(PYDIRVP)/pkg/server/catalog.py
-file path=$(PYDIRVP)/pkg/server/depot.py
+file path=$(PYDIRVP)/pkg/server/depot.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/server/depotresponse.py
-file path=$(PYDIRVP)/pkg/server/face.py
-file path=$(PYDIRVP)/pkg/server/feed.py
+file path=$(PYDIRVP)/pkg/server/face.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/server/feed.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/server/query_parser.py
-file path=$(PYDIRVP)/pkg/server/repository.py
-file path=$(PYDIRVP)/pkg/server/transaction.py
+file path=$(PYDIRVP)/pkg/server/repository.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/server/transaction.py \
+ pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/sha512_t.so
-file path=$(PYDIRVP)/pkg/smf.py
+file path=$(PYDIRVP)/pkg/smf.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/solver.so
file path=$(PYDIRVP)/pkg/sysattr.so
file path=$(PYDIRVP)/pkg/syscallat.so
file path=$(PYDIRVP)/pkg/sysvpkg.py
file path=$(PYDIRVP)/pkg/updatelog.py
file path=$(PYDIRVP)/pkg/variant.py
-file path=$(PYDIRVP)/pkg/version.py
+file path=$(PYDIRVP)/pkg/version.py pkg.depend.bypass-generate=.*six.*
dir path=etc
dir path=etc/security
dir path=etc/security/auth_attr.d
@@ -242,15 +258,15 @@
file path=usr/bin/pkgdiff
file path=usr/bin/pkgfmt
file path=usr/bin/pkglint
-file path=usr/bin/pkgmerge
+file path=usr/bin/pkgmerge pkg.depend.bypass-generate=.*six.*
file path=usr/bin/pkgmogrify
-file path=usr/bin/pkgrecv
+file path=usr/bin/pkgrecv pkg.depend.bypass-generate=.*six.*
file path=usr/bin/pkgrepo
file path=usr/bin/pkgsend
file path=usr/bin/pkgsign
file path=usr/bin/pkgsurf
dir path=usr/lib
-file path=usr/lib/pkg.depotd mode=0755
+file path=usr/lib/pkg.depotd mode=0755 pkg.depend.bypass-generate=.*six.*
dir path=usr/share
dir path=usr/share/lib
dir path=usr/share/lib/pkg
--- a/src/pkg/manifests/package:pkg:system-repository.p5m Tue Jun 30 11:44:33 2015 -0700
+++ b/src/pkg/manifests/package:pkg:system-repository.p5m Wed Jul 01 16:20:01 2015 -0700
@@ -44,7 +44,11 @@
file path=lib/svc/method/svc-pkg-sysrepo
file path=lib/svc/method/svc-pkg-sysrepo-cache
dir path=usr
-file path=usr/lib/pkg.sysrepo mode=0755
+#
+# Don't worry about the "from x.y.z" six imports since pkgdepend has some issues
+# with the python importer. Instead, we force a dependency on the six package.
+#
+file path=usr/lib/pkg.sysrepo mode=0755 pkg.depend.bypass-generate=.*six.*
dir path=usr/share/man/ja_JP.UTF-8/man1m
file path=usr/share/man/ja_JP.UTF-8/man1m/pkg.sysrepo.1m
dir path=usr/share/man/man1m
@@ -63,6 +67,8 @@
file path=var/log/pkg/sysrepo/error_log owner=pkg5srv mode=0644 preserve=true
file path=var/log/pkg/sysrepo/rewrite.log owner=pkg5srv mode=0644 preserve=true
license cr_Oracle license=cr_Oracle
+# force a dependency on the six package because we bypass dependency check for it
+depend type=require fmri=library/python/six-27
#
# The manual dependency on apache results from our calling apachectl from
# our method script, and can't be detected by pkgdepend.
--- a/src/pkgdep.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/pkgdep.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,6 +29,7 @@
import gettext
import locale
import os
+import six
import sys
import traceback
import warnings
@@ -192,7 +193,7 @@
for d in sorted(ds):
msg(d)
- for key, value in pkg_attrs.iteritems():
+ for key, value in six.iteritems(pkg_attrs):
msg(actions.attribute.AttributeAction(**{key: value}))
if show_missing:
--- a/src/pkgrepo.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/pkgrepo.py Wed Jul 01 16:20:01 2015 -0700
@@ -57,6 +57,7 @@
import operator
import shlex
import shutil
+import six
import sys
import tempfile
import textwrap
@@ -1139,7 +1140,7 @@
# Determine if the query returned any results by "peeking" at the first
# value returned from the generator expression.
try:
- got = gen_expr.next()
+ got = next(gen_expr)
except StopIteration:
got = None
actionlist = []
@@ -1420,7 +1421,7 @@
def _set_pub(conf, subcommand, props, pubs, repo):
"""Set publisher properties."""
- for sname, sprops in props.iteritems():
+ for sname, sprops in six.iteritems(props):
if sname not in ("publisher", "repository"):
usage(_("unknown property section "
"'{0}'").format(sname), cmd=subcommand)
@@ -1460,7 +1461,7 @@
try:
# Set/update the publisher's properties.
- for sname, sprops in props.iteritems():
+ for sname, sprops in six.iteritems(props):
if sname == "publisher":
target = pub
elif sname == "repository":
@@ -1469,7 +1470,7 @@
target = publisher.Repository()
pub.repository = target
- for pname, val in sprops.iteritems():
+ for pname, val in six.iteritems(sprops):
attrname = pname.replace("-", "_")
pval = getattr(target, attrname)
if isinstance(pval, list) and \
@@ -1506,8 +1507,8 @@
"""Set repository properties."""
# Set properties.
- for sname, props in props.iteritems():
- for pname, val in props.iteritems():
+ for sname, props in six.iteritems(props):
+ for pname, val in six.iteritems(props):
repo.cfg.set_property(sname, pname, val)
repo.write_config()
@@ -2117,7 +2118,7 @@
info_table = PrettyTable(res_dict["table_header"],
encoding=locale.getpreferredencoding())
info_table.align = "r"
- info_table.align[unicode(_("Publisher"),
+ info_table.align[six.text_type(_("Publisher"),
locale.getpreferredencoding())] = "l"
# Calculate column wise maximum number for formatting.
col_maxs = 4 * [0]
@@ -2133,7 +2134,7 @@
for idx, cell in enumerate(td):
if not cell:
t_row.append("-")
- elif isinstance(cell, basestring):
+ elif isinstance(cell, six.string_types):
t_row.append(cell)
elif isinstance(cell, dict):
t_row.append(ftemp.format(
--- a/src/publish.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/publish.py Wed Jul 01 16:20:01 2015 -0700
@@ -30,6 +30,7 @@
import gettext
import locale
import os
+import six
import sys
import traceback
import warnings
@@ -54,7 +55,7 @@
def error(text, cmd=None):
"""Emit an error message prefixed by the command name """
- if not isinstance(text, basestring):
+ if not isinstance(text, six.string_types):
# Assume it's an object that can be stringified.
text = str(text)
@@ -334,7 +335,7 @@
filelist = [("<stdin>", sys.stdin)]
else:
try:
- filelist = [(f, file(f)) for f in pargs]
+ filelist = [(f, open(f)) for f in pargs]
except IOError as e:
error(e, cmd="publish")
return 1
@@ -435,7 +436,7 @@
basename = os.path.basename(a.attrs["path"])
for pattern in timestamp_files:
if fnmatch.fnmatch(basename, pattern):
- if not isinstance(path, basestring):
+ if not isinstance(path, six.string_types):
# Target is from bundle; can't
# apply timestamp now.
continue
@@ -486,7 +487,7 @@
filelist = [("<stdin>", sys.stdin)]
else:
try:
- filelist = [(f, file(f)) for f in pargs]
+ filelist = [(f, open(f)) for f in pargs]
except IOError as e:
error(e, cmd="include")
return 1
--- a/src/pull.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/pull.py Wed Jul 01 16:20:01 2015 -0700
@@ -34,7 +34,6 @@
import sys
import tempfile
import traceback
-import urllib
import warnings
import pkg.catalog as catalog
@@ -55,6 +54,7 @@
from pkg.client import global_settings
from pkg.misc import emsg, get_pkg_otw_size, msg, PipeError
from pkg.client.debugvalues import DebugValues
+from six.moves.urllib.parse import quote
# Globals
archive = False
@@ -1226,7 +1226,7 @@
open_time = pfmri.get_timestamp()
return "{0:d}_{1}".format(
calendar.timegm(open_time.utctimetuple()),
- urllib.quote(str(pfmri), ""))
+ quote(str(pfmri), ""))
# First, retrieve the manifests and calculate package transfer
# sizes.
--- a/src/setup.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/setup.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,6 +27,7 @@
import fnmatch
import os
import platform
+import six
import stat
import sys
import shutil
@@ -34,11 +35,9 @@
import subprocess
import tarfile
import tempfile
-import urllib
import py_compile
import hashlib
import time
-import StringIO
from distutils.errors import DistutilsError, DistutilsFileError
from distutils.core import setup
@@ -720,7 +719,7 @@
# Don't install the scripts for python 3.4.
# if py_version == '3.4':
# return
- for d, files in scripts[osname].iteritems():
+ for d, files in six.iteritems(scripts[osname]):
for (srcname, dstname) in files:
dst_dir = util.change_root(self.root_dir, d)
dst_path = util.change_root(self.root_dir,
@@ -773,7 +772,7 @@
self.outfiles.append(dir)
else:
for file in files:
- if isinstance(file, basestring):
+ if isinstance(file, six.string_types):
infile = file
outfile = os.path.join(dir,
os.path.basename(file))
@@ -828,7 +827,7 @@
cddl_re = re.compile("\n(#\s*\n)?^[^\n]*CDDL HEADER START.+"
"CDDL HEADER END[^\n]*$(\n#\s*$)?", re.MULTILINE|re.DOTALL)
- with file(src, "r") as sfp:
+ with open(src, "r") as sfp:
try:
os.unlink(dst)
except EnvironmentError as e:
@@ -836,7 +835,7 @@
raise DistutilsFileError("could not delete "
"'{0}': {1}".format(dst, e))
- with file(dst, "w") as dfp:
+ with open(dst, "w") as dfp:
while True:
buf = sfp.read(buffer_size)
if not buf:
@@ -1063,7 +1062,7 @@
def finalize_options(self):
if self.mode is None:
self.mode = 0o644
- elif isinstance(self.mode, basestring):
+ elif isinstance(self.mode, six.string_types):
try:
self.mode = int(self.mode, 8)
except ValueError:
@@ -1104,7 +1103,7 @@
except py_compile.PyCompileError as e:
res = ""
for err in e.exc_value:
- if isinstance(err, basestring):
+ if isinstance(err, six.string_types):
res += err + "\n"
continue
@@ -1246,7 +1245,7 @@
# tree.
try:
ocontent = \
- file(self.get_module_outfile(self.build_lib,
+ open(self.get_module_outfile(self.build_lib,
[package], module)).read()
ov = re.search(versionre, ocontent).group(1)
except IOError:
@@ -1258,7 +1257,7 @@
if v == ov:
return
- mcontent = file(module_file).read()
+ mcontent = open(module_file).read()
mcontent = re.sub(versionre, vstr, mcontent)
tmpfd, tmp_file = tempfile.mkstemp()
os.write(tmpfd, mcontent)
--- a/src/sysrepo.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/sysrepo.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,14 +33,17 @@
import os
import shutil
import simplejson
+import six
import socket
import stat
import sys
import traceback
-import urllib2
import warnings
from mako.template import Template
+from six.moves.urllib.error import URLError
+from six.moves.urllib.parse import urlparse
+from six.moves.urllib.request import build_opener, HTTPRedirectHandler
from pkg.client import global_settings
from pkg.misc import msg, PipeError
@@ -120,11 +123,7 @@
SYSREPO_GROUP = "pkg5srv"
class SysrepoException(Exception):
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
+ pass
@atexit.register
def cleanup():
@@ -208,7 +207,7 @@
ret_uris = set(uri_list)
timed_out = False
- class SysrepoRedirectHandler(urllib2.HTTPRedirectHandler):
+ class SysrepoRedirectHandler(HTTPRedirectHandler):
""" A HTTPRedirectHandler that saves URIs we've been
redirected to along the path to our eventual destination."""
def __init__(self):
@@ -216,12 +215,12 @@
def redirect_request(self, req, fp, code, msg, hdrs, newurl):
self.redirects.add(newurl)
- return urllib2.HTTPRedirectHandler.redirect_request(
+ return HTTPRedirectHandler.redirect_request(
self, req, fp, code, msg, hdrs, newurl)
for uri in uri_list:
handler = SysrepoRedirectHandler()
- opener = urllib2.build_opener(handler)
+ opener = build_opener(handler)
if not uri.startswith("http:"):
ret_uris.update([uri])
continue
@@ -233,7 +232,7 @@
ret_uris.update(set(
[item.replace("/versions/0", "").rstrip("/")
for item in handler.redirects]))
- except urllib2.URLError as err:
+ except URLError as err:
# We need to log this, and carry on - the url
# could become available at a later date.
msg(_("WARNING: unable to access {uri} when checking "
@@ -259,7 +258,7 @@
if not isinstance(pub_info, dict):
raise SysrepoException("{0} is not a dict".format(pub_info))
for uri in pub_info:
- if not isinstance(uri, basestring):
+ if not isinstance(uri, six.string_types):
raise SysrepoException("{0} is not a basestring".format(
uri))
uri_info = pub_info[uri]
@@ -271,13 +270,13 @@
raise SysrepoException("{0} does not have 6 "
"items".format(props))
# props [0] and [3] must be strings
- if not isinstance(props[0], basestring) or \
- not isinstance(props[3], basestring):
+ if not isinstance(props[0], six.string_types) or \
+ not isinstance(props[3], six.string_types):
raise SysrepoException("indices 0 and 3 of {0} "
"are not basestrings".format(props))
# prop[5] must be a string, either "file" or "dir"
# and prop[0] must start with file://
- if not isinstance(props[5], basestring) or \
+ if not isinstance(props[5], six.string_types) or \
(props[5] not in ["file", "dir"] and
props[0].startswith("file://")):
raise SysrepoException("index 5 of {0} is not a "
@@ -287,7 +286,7 @@
if not isinstance(no_uri_pubs, list):
raise SysrepoException("{0} is not a list".format(no_uri_pubs))
for item in no_uri_pubs:
- if not isinstance(item, basestring):
+ if not isinstance(item, six.string_types):
raise SysrepoException(
"{0} is not a basestring".format(item))
@@ -403,7 +402,7 @@
any authentication details since these are not supported by ProxyRemote.
"""
- u = urllib2.urlparse.urlparse(proxy)
+ u = urlparse(proxy)
netloc_parts = u.netloc.split("@")
# If we don't have any authentication details, return.
if len(netloc_parts) == 1:
@@ -479,7 +478,7 @@
if uri.startswith("file:"):
# we only support p5p files and directory-based
# repositories of >= version 4.
- urlresult = urllib2.urlparse.urlparse(uri)
+ urlresult = urlparse(uri)
utype = "dir"
if not os.path.exists(urlresult.path):
raise SysrepoException(
@@ -595,7 +594,7 @@
if not val:
continue
try:
- result = urllib2.urlparse.urlparse(val)
+ result = urlparse(val)
if result.scheme != "http":
raise Exception(
_("scheme must be http"))
@@ -633,7 +632,7 @@
https_proxy=https_proxy)
httpd_conf_path = os.path.join(runtime_dir,
SYSREPO_HTTP_FILENAME)
- httpd_conf_file = file(httpd_conf_path, "wb")
+ httpd_conf_file = open(httpd_conf_path, "wb")
httpd_conf_file.write(httpd_conf_text)
httpd_conf_file.close()
except socket.gaierror as err:
@@ -650,7 +649,7 @@
try:
crypto_path = os.path.join(runtime_dir, SYSREPO_CRYPTO_FILENAME)
- file(crypto_path, "w").close()
+ open(crypto_path, "w").close()
os.chmod(crypto_path, 0o600)
written_crypto_content = False
@@ -658,15 +657,15 @@
for (pub, cert_path, key_path, hash, proxy, utype) in \
repo_list:
if cert_path and key_path:
- crypto_file = file(crypto_path, "a")
- crypto_file.writelines(file(cert_path))
- crypto_file.writelines(file(key_path))
+ crypto_file = open(crypto_path, "a")
+ crypto_file.writelines(open(cert_path))
+ crypto_file.writelines(open(key_path))
crypto_file.close()
written_crypto_content = True
# Apache needs us to have some content in this file
if not written_crypto_content:
- crypto_file = file(crypto_path, "w")
+ crypto_file = open(crypto_path, "w")
crypto_file.write(
"# this space intentionally left blank\n")
crypto_file.close()
@@ -707,7 +706,7 @@
[htdocs_path, pub, hash] +
SYSREPO_PUB_DIRNAME)
os.makedirs(publisher_path)
- publisher_file = file(
+ publisher_file = open(
os.path.sep.join([publisher_path,
SYSREPO_PUB_FILENAME]), "w")
publisher_file.write(publisher_text)
@@ -724,7 +723,7 @@
os.path.sep.join(SYSREPO_VERSIONS_DIRNAME))
os.makedirs(versions_path)
- versions_file = file(os.path.join(versions_path, "index.html"),
+ versions_file = open(os.path.join(versions_path, "index.html"),
"w")
versions_file.write(SYSREPO_VERSIONS_STR)
versions_file.close()
--- a/src/tests/api/t_action.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_action.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,6 +29,7 @@
testutils.setup_environment("../../../proto")
import pkg5unittest
+import six
import unittest
import pkg.actions as action
import pkg.actions.generic as generic
@@ -299,7 +300,7 @@
}
astr = "file {0} path=usr/bin/foo mode=0755 owner=root group=bin"
- for k, v in d.iteritems():
+ for k, v in six.iteritems(d):
a = action.fromstr(astr.format(k))
self.assert_(action.fromstr(str(a)) == a)
self.assert_(a.hash == v)
@@ -543,7 +544,7 @@
"mediator-implementation": "svr4",
"mediator-priority": "site",
}
- for prop, val in props.iteritems():
+ for prop, val in six.iteritems(props):
nact = "{0} path=usr/bin/vi " \
"target=../sunos/bin/edit {1}={2}".format(aname,
prop, val)
@@ -551,7 +552,7 @@
# Action with multiple values for any property is
# invalid.
- for prop, val in props.iteritems():
+ for prop, val in six.iteritems(props):
nact = "{0} path=usr/bin/vi " \
"target=../sunos/bin/edit mediator=vi " \
"{1}={2} {3}={4} ".format(aname, prop, val, prop,
--- a/src/tests/api/t_api_search.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_api_search.py Wed Jul 01 16:20:01 2015 -0700
@@ -30,10 +30,12 @@
import copy
import os
import shutil
+import six
import tempfile
import time
import unittest
-import urllib2
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import urlopen
import pkg.client.api as api
import pkg.client.api_errors as api_errors
@@ -1968,7 +1970,7 @@
self._search_op(api_obj, False, 'example', set())
orig_fn = os.path.join(index_dir,
- query_parser.TermQuery._get_gdd(index_dir).values()[0].\
+ list(query_parser.TermQuery._get_gdd(index_dir).values())[0].\
get_file_name())
dest_fn = orig_fn + "TMP"
@@ -2017,10 +2019,10 @@
"least one of those fields:")
expected_code = 404
q_str = "foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_02(self):
"""Test that a corrupt case_sensitive value doesn't break the "
@@ -2032,10 +2034,10 @@
)
expected_code = 404
q_str = "FAlse_2_None_None_foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_03(self):
"""Test that a corrupt return_type value doesn't break the "
@@ -2047,10 +2049,10 @@
)
expected_code = 404
q_str = "False_3_None_None_foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_04(self):
"""Test that a corrupt return_type value doesn't break the "
@@ -2062,10 +2064,10 @@
)
expected_code = 404
q_str = "False_A_None_None_foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_05(self):
"""Test that a corrupt num_to_return value doesn't break the "
@@ -2077,10 +2079,10 @@
)
expected_code = 404
q_str = "False_2_NOne_None_foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_06(self):
"""Test that a corrupt start_point value doesn't break the "
@@ -2092,10 +2094,10 @@
)
expected_code = 404
q_str = "False_2_None_NOne_foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_07(self):
"""Test that a corrupt case_sensitive value doesn't break the "
@@ -2107,10 +2109,10 @@
)
expected_code = 404
q_str = "_2_None_None_foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_08(self):
"""Test that a missing return_type value doesn't break the "
@@ -2122,10 +2124,10 @@
)
expected_code = 404
q_str = "False__None_None_foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_09(self):
"""Test that a missing num_to_return value doesn't break the "
@@ -2137,10 +2139,10 @@
)
expected_code = 404
q_str = "False_2__None_foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_10(self):
"""Test that a missing start_point value doesn't break the "
@@ -2152,10 +2154,10 @@
)
expected_code = 404
q_str = "False_2_None__foo"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_9845_11(self):
"""Test that missing query text doesn't break the server."""
@@ -2163,10 +2165,10 @@
expected_string = _("Could not parse query.")
expected_code = 400
q_str = "False_2_None_None_"
- self.validateAssertRaises(urllib2.HTTPError,
+ self.validateAssertRaises(HTTPError,
lambda x: self._check_err(x, expected_string,
expected_code),
- urllib2.urlopen, durl + "/search/1/" + q_str)
+ urlopen, durl + "/search/1/" + q_str)
def test_bug_14177(self):
def run_tests(api_obj, remote):
@@ -2294,7 +2296,7 @@
prune_versions=False)
self._search_op(api_obj, True, "</bin>", res_both_packages,
return_actions=False, prune_versions=False)
-
+
# Check that after uninstall, back to returning all versions.
self._api_uninstall(api_obj, ["example_pkg"])
self._search_op(api_obj, True, "/bin", res_both_actions)
@@ -2428,7 +2430,7 @@
self.res_remote_path)
self._search_op(api_obj, True, "example_path",
self.res_remote_path, servers=[{"origin": durl}])
- lfh = file(self.dc.get_logpath(), "rb")
+ lfh = open(self.dc.get_logpath(), "rb")
found = 0
num_expected = 7
for line in lfh:
@@ -2558,7 +2560,7 @@
fmris = indexer.Indexer.check_for_updates(ind_dir,
self._get_repo_catalog())
self.assertEqual(set(), fmris)
-
+
back_dir = ind_dir + ".BACKUP"
shutil.copytree(ind_dir, back_dir)
self.pkgsend_bulk(durl, self.example_pkg10)
@@ -2656,7 +2658,7 @@
"Didn't get expected error:{0}".format(err))
else:
return TestApiSearchBasics._extract_action_from_res(it)
-
+
def _search_op(self, api_obj, remote, token, test_value,
case_sensitive=False, return_actions=True, num_to_return=None,
@@ -2717,7 +2719,7 @@
c_uuid = pub.client_uuid
except api_errors.UnknownPublisher:
c_uuid = None
- lfh = file(self.dcs[d].get_logpath(), "rb")
+ lfh = open(self.dcs[d].get_logpath(), "rb")
found = 0
for line in lfh:
if "X-IPKG-UUID:" in line:
--- a/src/tests/api/t_catalog.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_catalog.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,6 +33,7 @@
import os
import shutil
import simplejson
+import six
import stat
import unittest
@@ -105,7 +106,7 @@
def __gen_manifest(self, f):
m = manifest.Manifest()
- lines = unicode(
+ lines = six.text_type(
"depend [email protected] type=require\n"
"set name=facet.devel value=true\n"
"set name=info.classification "
@@ -480,7 +481,7 @@
c = catalog.Catalog(meta_root=cpath, log_updates=True)
# Verify that a newly created catalog has no signature data.
- for sigs in c.signatures.itervalues():
+ for sigs in six.itervalues(c.signatures):
self.assertEqual(len(sigs), 0)
# Verify that a newly created catalog will validate since no
@@ -503,7 +504,7 @@
self.assertTrue("catalog.base.C" in old_sigs)
updates = 0
- for fname, sigs in old_sigs.iteritems():
+ for fname, sigs in six.iteritems(old_sigs):
self.assertTrue(len(sigs) >= 1)
if fname.startswith("update."):
@@ -847,7 +848,7 @@
# Verify that the updates available to the original
# catalog are the same as the updated needed to update
# the duplicate.
- self.assertEqual(src.updates.keys(), updates)
+ self.assertEqual(list(src.updates.keys()), updates)
# Apply original catalog's updates to the duplicate.
dest.apply_updates(src.meta_root)
--- a/src/tests/api/t_config.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_config.py Wed Jul 01 16:20:01 2015 -0700
@@ -25,7 +25,7 @@
# Copyright (c) 2008, 2015, Oracle and/or its affiliates. All rights reserved.
#
import testutils
if __name__ == "__main__":
testutils.setup_environment("../../../proto")
import pkg5unittest
@@ -36,6 +35,7 @@
import re
import shutil
import signal
+import six
import stat
import tempfile
import time
@@ -102,14 +102,14 @@
# Verify that the stringified form of the property's
# value matches what is expected.
p1 = propcls(propname, default=val)
- self.assertEqual(unicode(p1), expstr)
+ self.assertEqual(six.text_type(p1), expstr)
self.assertEqual(str(p1), expstr.encode("utf-8"))
# Verify that a property value's stringified form
# provides can be parsed into an exact equivalent
# in native form (e.g. list -> string -> list).
p2 = propcls(propname)
- p2.value = unicode(p1)
+ p2.value = six.text_type(p1)
self.assertEqual(p1.value, p2.value)
self.assertEqualDiff(str(p1), str(p2))
@@ -120,7 +120,7 @@
def __verify_ex_stringify(self, ex):
encs = str(ex)
self.assertNotEqual(len(encs), 0)
- unis = unicode(ex)
+ unis = six.text_type(ex)
self.assertNotEqual(len(unis), 0)
self.assertEqualDiff(encs, unis.encode("utf-8"))
@@ -838,7 +838,7 @@
def __verify_stringify(self, cls, explist):
for val, expstr in explist:
- self.assertEqual(unicode(cls(val)), expstr)
+ self.assertEqual(six.text_type(cls(val)), expstr)
self.assertEqual(str(cls(val)), expstr.encode("utf-8"))
def test_base(self):
@@ -1252,8 +1252,8 @@
conf = cfg.Config(definitions=self._defs, overrides=overrides,
version=0)
exp_state = copy.deepcopy(self._initial_state[0])
- for sname, props in overrides.iteritems():
- for pname, value in props.iteritems():
+ for sname, props in six.iteritems(overrides):
+ for pname, value in six.iteritems(props):
exp_state[sname][pname] = value
self._verify_initial_state(conf, 0, exp_state=exp_state)
@@ -1289,7 +1289,7 @@
str_basic =
bool_basic = False
-""", unicode(conf))
+""", six.text_type(conf))
conf.set_property("first_section", "str_basic", TH_PACKAGE)
self.assertEqualDiff(u"""\
@@ -1297,7 +1297,7 @@
str_basic = {0}
bool_basic = False
-""".format(TH_PACKAGE), unicode(conf))
+""".format(TH_PACKAGE), six.text_type(conf))
# Verify target is None.
self.assertEqual(conf.target, None)
@@ -1424,7 +1424,7 @@
portable.remove(scpath)
# Verify read and write of sample files.
- for ver, content in self._initial_files.iteritems():
+ for ver, content in six.iteritems(self._initial_files):
scpath = self.make_misc_files({
"cfg_cache": content })[0]
@@ -1681,7 +1681,7 @@
True)
conf = cfg.Config(definitions=self._templated_defs, version=1)
- self.assertEqualDiff([], conf.get_index().keys())
+ self.assertEqualDiff([], list(conf.get_index().keys()))
conf.set_property("authority_example.com", "prefix",
"example.com")
@@ -1955,7 +1955,7 @@
def __verify_ex_stringify(self, ex):
encs = str(ex)
self.assertNotEqual(len(encs), 0)
- unis = unicode(ex)
+ unis = six.text_type(ex)
self.assertNotEqual(len(unis), 0)
self.assertEqualDiff(encs, unis.encode("utf-8"))
@@ -2095,7 +2095,7 @@
# attempted (not currently supported).
self.assertRaises(cfg.SMFWriteError, conf.write)
- for ver, mfst_content in self._initial_files.iteritems():
+ for ver, mfst_content in six.iteritems(self._initial_files):
test_mfst(svc_fmri, ver, mfst_content, self._defs)
# Verify configuration data with unknown sections or properties
--- a/src/tests/api/t_dependencies.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_dependencies.py Wed Jul 01 16:20:01 2015 -0700
@@ -1134,7 +1134,7 @@
pdir = "usr/lib/python{0}/vendor-packages".format(py_version)
self.make_proto_text_file("{0}/cProfile.py".format(pdir),
"#!/usr/bin/python\n\\1" + self.python_module_text)
-
+
def make_smf_test_files(self):
for manifest in self.smf_paths.keys():
self.make_proto_text_file(self.paths[manifest],
@@ -1229,7 +1229,7 @@
def test_ext_script(self):
"""Check that a file that starts with #! and references a file
outside its package is reported as a dependency."""
-
+
def _check_res(res):
ds, es, ms, pkg_attrs = res
if es != []:
@@ -1296,7 +1296,7 @@
else:
raise RuntimeError("Unexpected "
"dependency path:{0}".format(d))
-
+
def test_ext_elf(self):
"""Check that an elf file that requires a library outside its
package is reported as a dependency."""
@@ -1987,7 +1987,7 @@
if len(ms) != 1:
raise RuntimeError("Didn't get expected types of "
"missing files:\n{0}".format(ms))
- self.assertEqual(ms.keys()[0], "empty file")
+ self.assertEqual(list(ms.keys())[0], "empty file")
self.assert_(len(d_map) == 0)
# This should find the binary file first and thus produce
--- a/src/tests/api/t_manifest.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_manifest.py Wed Jul 01 16:20:01 2015 -0700
@@ -26,6 +26,7 @@
import unittest
import tempfile
import os
+import six
import sys
import types
import itertools
@@ -135,7 +136,7 @@
# Verify set_content with a Unicode string results in a
# byte string (for now).
m = manifest.Manifest()
- m.set_content(unicode(bstr, "utf-8"))
+ m.set_content(six.text_type(bstr, "utf-8"))
output = list(m.as_lines())[0].rstrip()
self.assertEqual(bstr, output)
self.assert_(isinstance(output, str))
@@ -375,7 +376,7 @@
output1 = "".join(m1.as_lines())
m2 = manifest.Manifest()
- m2.set_content(unicode(bstr, "utf-8"), signatures=True)
+ m2.set_content(six.text_type(bstr, "utf-8"), signatures=True)
output2 = "".join(m2.as_lines())
self.assertEqualDiff(output1, output2)
self.assertEqualDiff(m1.signatures, m2.signatures)
--- a/src/tests/api/t_p5i.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_p5i.py Wed Jul 01 16:20:01 2015 -0700
@@ -41,8 +41,9 @@
import shutil
import sys
import tempfile
-import urllib
-import urlparse
+
+from six.moves.urllib.parse import urlparse, urlunparse
+from six.moves.urllib.request import pathname2url
class TestP5I(pkg5unittest.Pkg5TestCase):
"""Class to test the functionality of the pkg.p5i module."""
@@ -53,35 +54,35 @@
#
p5i_bobcat = """{
"packages": [
- "pkg:/[email protected],5.11-0",
+ "pkg:/[email protected],5.11-0",
"baz"
- ],
+ ],
"publishers": [
{
- "alias": "cat",
- "name": "bobcat",
+ "alias": "cat",
+ "name": "bobcat",
"packages": [
"pkg:/[email protected],5.11-0"
- ],
+ ],
"repositories": [
{
- "collection_type": "core",
- "description": "xkcd.net/325",
+ "collection_type": "core",
+ "description": "xkcd.net/325",
"legal_uris": [
"http://xkcd.com/license.html"
- ],
- "mirrors": [],
- "name": "source",
+ ],
+ "mirrors": [],
+ "name": "source",
"origins": [
"http://localhost:12001/"
- ],
- "refresh_seconds": 43200,
- "registration_uri": "",
+ ],
+ "refresh_seconds": 43200,
+ "registration_uri": "",
"related_uris": []
}
]
}
- ],
+ ],
"version": 1
}
"""
@@ -170,8 +171,8 @@
# Verify that parse returns the expected object and information
# when provided a file URI.
location = os.path.abspath(path1)
- location = urlparse.urlunparse(("file", "",
- urllib.pathname2url(location), "", "", ""))
+ location = urlunparse(("file", "",
+ pathname2url(location), "", "", ""))
validate_results(p5i.parse(location=location))
fobj.close()
fobj = None
@@ -189,8 +190,8 @@
# p5i information.
lcpath = os.path.join(self.test_root, "libc.so.1")
location = os.path.abspath(lcpath)
- location = urlparse.urlunparse(("file", "",
- urllib.pathname2url(location), "", "", ""))
+ location = urlunparse(("file", "",
+ pathname2url(location), "", "", ""))
# First, test as a file:// URI.
self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
@@ -207,15 +208,15 @@
# First, test the no repository case.
expected = """{
- "packages": [],
+ "packages": [],
"publishers": [
{
- "alias": "cat",
- "name": "bobcat",
- "packages": [],
+ "alias": "cat",
+ "name": "bobcat",
+ "packages": [],
"repositories": []
}
- ],
+ ],
"version": 1
}
"""
@@ -238,29 +239,29 @@
# Next, test the partial repository configuration case. No
# origin is provided, but everything else is.
expected = """{
- "packages": [],
+ "packages": [],
"publishers": [
{
- "alias": "cat",
- "name": "bobcat",
- "packages": [],
+ "alias": "cat",
+ "name": "bobcat",
+ "packages": [],
"repositories": [
{
- "collection_type": "core",
- "description": "xkcd.net/325",
+ "collection_type": "core",
+ "description": "xkcd.net/325",
"legal_uris": [
"http://xkcd.com/license.html"
- ],
- "mirrors": [],
- "name": "source",
- "origins": [],
- "refresh_seconds": 43200,
- "registration_uri": "",
+ ],
+ "mirrors": [],
+ "name": "source",
+ "origins": [],
+ "refresh_seconds": 43200,
+ "registration_uri": "",
"related_uris": []
}
]
}
- ],
+ ],
"version": 1
}
"""
--- a/src/tests/api/t_p5p.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_p5p.py Wed Jul 01 16:20:01 2015 -0700
@@ -42,6 +42,7 @@
import pkg.pkgtarfile as ptf
import pkg.portable as portable
import shutil
+import six
import sys
import tarfile as tf
import tempfile
@@ -90,7 +91,7 @@
"tmp/LICENSE", "tmp/quux"]
def seed_ta_dir(self, certs, dest_dir=None):
- if isinstance(certs, basestring):
+ if isinstance(certs, six.string_types):
certs = [certs]
if not dest_dir:
dest_dir = self.ta_dir
@@ -449,7 +450,7 @@
sm = pkg.manifest.Manifest(pfmri=pfmri)
sm.set_content(pathname=repo.manifest(pfmri), signatures=True)
- if isinstance(content, basestring):
+ if isinstance(content, six.string_types):
dm = pkg.manifest.Manifest()
dm.set_content(content=content, signatures=True)
else:
--- a/src/tests/api/t_pkg_api_install.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_pkg_api_install.py Wed Jul 01 16:20:01 2015 -0700
@@ -42,10 +42,10 @@
import pkg.portable as portable
import stat
import shutil
-import urllib
-import urlparse
from pkg.client.debugvalues import DebugValues
+from six.moves.urllib.parse import urlunparse
+from six.moves.urllib.request import pathname2url
PKG_CLIENT_NAME = "pkg"
@@ -664,8 +664,8 @@
# Next, create a repository with an older version of pkg,
# and a newer version of foo.
new_repo_dir = os.path.join(self.test_root, "test2")
- new_repo_uri = urlparse.urlunparse(("file", "",
- urllib.pathname2url(new_repo_dir), "", "", ""))
+ new_repo_uri = urlunparse(("file", "",
+ pathname2url(new_repo_dir), "", "", ""))
self.create_repo(new_repo_dir,
properties={ "publisher": { "prefix": "test2" } })
--- a/src/tests/api/t_pkglint.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_pkglint.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,7 +27,6 @@
testutils.setup_environment("../../../proto")
import pkg5unittest
-import ConfigParser
import os.path
import shutil
import unittest
@@ -3495,7 +3494,7 @@
linecnts = [] # tuples of starting line no., ending line no
linecounter = 0 # running total
try:
- data = file(filename).read()
+ data = open(filename).read()
except IOError as e:
lint_logger.error("Unable to read manifest file {0}".format(
filename, msgid="lint.manifest001"))
--- a/src/tests/api/t_pkgtarfile.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_pkgtarfile.py Wed Jul 01 16:20:01 2015 -0700
@@ -49,7 +49,7 @@
filename = "baz"
create_path = os.path.join(filepath, filename)
os.makedirs(filepath)
- wfp = file(create_path, "wb")
+ wfp = open(create_path, "wb")
buf = os.urandom(8192)
wfp.write(buf)
wfp.close()
--- a/src/tests/api/t_sha512_t.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_sha512_t.py Wed Jul 01 16:20:01 2015 -0700
@@ -21,7 +21,7 @@
#
#
-# Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
#
import testutils
@@ -30,6 +30,7 @@
import pkg5unittest
import unittest
+from six.moves import range
try:
import pkg.sha512_t as sha512_t
@@ -125,7 +126,7 @@
# Test scalability
a = sha512_t.SHA512_t()
- for i in xrange(1000000):
+ for i in range(1000000):
a.update("abc")
a.hexdigest()
--- a/src/tests/api/t_unix_usergrp.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/api/t_unix_usergrp.py Wed Jul 01 16:20:01 2015 -0700
@@ -20,8 +20,9 @@
# CDDL HEADER END
#
-# Copyright 2010 Sun Microsystems, Inc. All rights reserved.
-# Use is subject to license terms.
+#
+# Copyright (c) 2010, 2015, Oracle and/or its affiliates. All rights reserved.
+#
import testutils
if __name__ == "__main__":
@@ -45,7 +46,7 @@
if not os.path.exists("/etc/group"):
return
- grpfile = file(os.path.join(self.test_root, "etc", "group"), "w")
+ grpfile = open(os.path.join(self.test_root, "etc", "group"), "w")
grpfile.write( \
"""root::0:
gk::0:
@@ -89,7 +90,7 @@
if not os.path.exists("/etc/group"):
return
- grpfile = file(os.path.join(self.test_root, "etc", "group"), "w")
+ grpfile = open(os.path.join(self.test_root, "etc", "group"), "w")
grpfile.write( \
"""root::0:
blorg
@@ -127,7 +128,7 @@
if not os.path.exists("/etc/group"):
return
- grpfile = file(os.path.join(self.test_root, "etc", "group"), "w")
+ grpfile = open(os.path.join(self.test_root, "etc", "group"), "w")
grpfile.write( \
"""root::0:
gk::0:
@@ -156,7 +157,7 @@
if not os.path.exists("/etc/passwd"):
return
- passwd = file(os.path.join(self.test_root, "etc", "passwd"), "w")
+ passwd = open(os.path.join(self.test_root, "etc", "passwd"), "w")
passwd.write( \
"""root:x:0:0::/root:/usr/bin/bash
gk:x:0:0::/root:/usr/bin/bash
@@ -204,7 +205,7 @@
if not os.path.exists("/etc/passwd"):
return
- passwd = file(os.path.join(self.test_root, "etc", "passwd"), "w")
+ passwd = open(os.path.join(self.test_root, "etc", "passwd"), "w")
passwd.write( \
"""root:x:0:0::/root:/usr/bin/bash
daemon:x:1:1::/:
@@ -242,7 +243,7 @@
if not os.path.exists("/etc/passwd"):
return
- passwd = file(os.path.join(self.test_root, "etc", "passwd"), "w")
+ passwd = open(os.path.join(self.test_root, "etc", "passwd"), "w")
passwd.write( \
"""root:x:0:0::/root:/usr/bin/bash
gk:x:0:0::/root:/usr/bin/bash
--- a/src/tests/baseline.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/baseline.py Wed Jul 01 16:20:01 2015 -0700
@@ -98,7 +98,7 @@
if not self.__generating:
return
try:
- f = file(self.__filename, "w")
+ f = open(self.__filename, "w")
except IOError as xxx_todo_changeme:
(err, msg) = xxx_todo_changeme.args
print("ERROR: storing baseline:", file=sys.stderr)
@@ -107,7 +107,7 @@
return
# Sort the results to make baseline diffs easier
- results_sorted = self.__results.keys()
+ results_sorted = list(self.__results.keys())
results_sorted.sort()
print("# Writing baseline to {0}.".format(self.__filename),
file=sys.stderr)
@@ -124,7 +124,7 @@
return
try:
- f = file(self.__filename, "r")
+ f = open(self.__filename, "r")
except IOError as xxx_todo_changeme1:
(err, msg) = xxx_todo_changeme1.args
print("ERROR: loading baseline:", file=sys.stderr)
--- a/src/tests/cli/t_actuators.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_actuators.py Wed Jul 01 16:20:01 2015 -0700
@@ -27,6 +27,7 @@
testutils.setup_environment("../../../proto")
import os
+import six
import pkg5unittest
import unittest
import stat
@@ -667,26 +668,26 @@
def test_release_note_5(self):
# test unicode character in release notes
self.pkg("install -n [email protected]")
- unicode(self.output, "utf-8").index(u"Моё судно на воздушной подушке полно угрей")
- unicode(self.output, "utf-8").index(u"Eels are best smoked")
+ six.text_type(self.output, "utf-8").index(u"Моё судно на воздушной подушке полно угрей")
+ six.text_type(self.output, "utf-8").index(u"Eels are best smoked")
self.pkg("install -v [email protected]")
- unicode(self.output, "utf-8").index(u"Моё судно на воздушной подушке полно угрей")
- unicode(self.output, "utf-8").index(u"Eels are best smoked")
+ six.text_type(self.output, "utf-8").index(u"Моё судно на воздушной подушке полно угрей")
+ six.text_type(self.output, "utf-8").index(u"Eels are best smoked")
self.pkg("uninstall '*'")
def test_release_note_6(self):
# test parsable unicode
self.pkg("install --parsable 0 [email protected]")
self.pkg("history -n 1 -N")
- unicode(self.output, "utf-8").index(u"Моё судно на воздушной подушке полно угрей")
- unicode(self.output, "utf-8").index(u"Eels are best smoked")
+ six.text_type(self.output, "utf-8").index(u"Моё судно на воздушной подушке полно угрей")
+ six.text_type(self.output, "utf-8").index(u"Eels are best smoked")
self.pkg("uninstall '*'")
def test_release_note_7(self):
# check that multiple release notes are composited properly
self.pkg("install [email protected]")
self.pkg("install -v [email protected] [email protected]")
- uni_out = unicode(self.output, "utf-8")
+ uni_out = six.text_type(self.output, "utf-8")
# we indent the release notes for readability, so a strict
# index or compare won't work unless we remove indenting
# this works for our test cases since they have no leading
@@ -698,11 +699,11 @@
uni_out.index(self.multi_unicode)
uni_out.index(self.multi_ascii)
- # repeat test using history to make sure everything is there.
- # do as unpriv. user
+ # repeat test using history to make sure everything is there.
+ # do as unpriv. user
- self.pkg("history -n 1 -HN", su_wrap=True)
- uni_out = unicode(self.output, "utf-8")
+ self.pkg("history -n 1 -HN", su_wrap=True)
+ uni_out = six.text_type(self.output, "utf-8")
# we indent the release notes for readability, so a strict
# index or compare won't work unless we remove indenting
# this works for our test cases since they have no leading
@@ -720,7 +721,7 @@
# verify that temporary file is correctly written with /n characters
self.pkg("-D GenerateNotesFile=1 install [email protected]")
# find name of file containing release notes in output.
- for field in unicode(self.output, "utf-8").split(u" "):
+ for field in six.text_type(self.output, "utf-8").split(u" "):
try:
if field.index(u"release-note"):
break
@@ -734,7 +735,7 @@
# read release note file and check to make sure
# entire contents are there verbatim
- release_note = unicode(file(field).read(), "utf-8")
+ release_note = six.text_type(open(field).read(), "utf-8")
assert self.multi_unicode == release_note
self.pkg("uninstall '*'")
--- a/src/tests/cli/t_change_facet.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_change_facet.py Wed Jul 01 16:20:01 2015 -0700
@@ -107,7 +107,7 @@
file_path = os.path.join(self.get_img_path(), str(path))
try:
- f = file(file_path)
+ f = open(file_path)
except IOError as e:
if e.errno == errno.ENOENT and negate:
return
@@ -157,7 +157,7 @@
)
# notice that a file should not exist according to its facet
- file(os.path.join(self.get_img_path(), "3"), "w")
+ open(os.path.join(self.get_img_path(), "3"), "w")
self.pkg("verify", exit=1)
os.remove(os.path.join(self.get_img_path(), "3"))
@@ -308,7 +308,7 @@
# First, install faceted package.
self.pkg("install pkg_A")
- for i in xrange(9):
+ for i in range(9):
self.assert_file_is_there(i)
# Next, set general locale.*=False, but locale.fr=True.
--- a/src/tests/cli/t_change_variant.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_change_variant.py Wed Jul 01 16:20:01 2015 -0700
@@ -160,7 +160,7 @@
file_path = os.path.join(self.get_img_path(), path)
try:
- f = file(file_path)
+ f = open(file_path)
except IOError as e:
if e.errno == errno.ENOENT and negate:
return
--- a/src/tests/cli/t_depot_config.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_depot_config.py Wed Jul 01 16:20:01 2015 -0700
@@ -31,13 +31,15 @@
import pkg5unittest
import copy
-import httplib
import os
import time
import unittest
-import urllib2
import certgenerator
import shutil
+from six.moves import http_client
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.parse import quote
+from six.moves.urllib.request import urlopen
from pkg.client.debugvalues import DebugValues
import pkg.fmri
@@ -327,7 +329,7 @@
self.ac.start()
if build_indexes:
# we won't return until indexes are built
- u = urllib2.urlopen(
+ u = urlopen(
"{0}/depot/depot-wait-refresh".format(hc.url)).close()
@@ -529,12 +531,12 @@
def get_url(url_path):
try:
- url_obj = urllib2.urlopen(url_path, timeout=10)
+ url_obj = urlopen(url_path, timeout=10)
self.assert_(url_obj.code == 200,
"Failed to open {0}: {1}".format(url_path,
url_obj.code))
url_obj.close()
- except urllib2.HTTPError as e:
+ except HTTPError as e:
self.debug("Failed to open {0}: {1}".format(
url_path, e))
raise
@@ -565,9 +567,9 @@
bad_url = "{0}/usr/test2/en/catalog.shtml".format(self.ac.url)
raised_404 = False
try:
- url_obj = urllib2.urlopen(bad_url, timeout=10)
+ url_obj = urlopen(bad_url, timeout=10)
url_obj.close()
- except urllib2.HTTPError as e:
+ except HTTPError as e:
if e.code == 404:
raised_404 = True
self.assert_(raised_404, "Didn't get a 404 opening {0}".format(
@@ -652,8 +654,8 @@
# gather the FMRIs we published and the URL-quoted version
first_fmri = pkg.fmri.PkgFmri(first[0])
second_fmri = pkg.fmri.PkgFmri(second[0])
- first_ver = urllib2.quote(str(first_fmri.version))
- second_ver = urllib2.quote(str(second_fmri.version))
+ first_ver = quote(str(first_fmri.version))
+ second_ver = quote(str(second_fmri.version))
self.depotconfig("")
self.image_create()
@@ -832,7 +834,7 @@
expected value 'value'."""
ret = False
try:
- u = urllib2.urlopen(url)
+ u = urlopen(url)
h = u.headers.get(header, "")
if value in h:
return True
@@ -890,8 +892,8 @@
# verify the instance is definitely the one using our custom
# httpd.conf
- u = urllib2.urlopen("{0}/pkg5test-server-status".format(self.ac.url))
- self.assert_(u.code == httplib.OK,
+ u = urlopen("{0}/pkg5test-server-status".format(self.ac.url))
+ self.assert_(u.code == http_client.OK,
"Error getting pkg5-server-status")
self.image_create()
--- a/src/tests/cli/t_https.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_https.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,6 +32,7 @@
import hashlib
import os
import shutil
+import six
import stat
import tempfile
import certgenerator
@@ -316,7 +317,7 @@
*args, **kwargs)
def seed_ta_dir(self, certs, dest_dir=None):
- if isinstance(certs, basestring):
+ if isinstance(certs, six.string_types):
certs = [certs]
if not dest_dir:
dest_dir = self.ta_dir
--- a/src/tests/cli/t_lock.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_lock.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,7 +29,6 @@
testutils.setup_environment("../../../proto")
import pkg5unittest
-import cStringIO
import os
import pkg.client.api_errors as api_errors
import pkg.client.progress as progress
--- a/src/tests/cli/t_pkg_composite.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_composite.py Wed Jul 01 16:20:01 2015 -0700
@@ -36,6 +36,7 @@
import pkg.misc as misc
import pkg.p5p
import shutil
+import six
import stat
import tempfile
import unittest
@@ -105,7 +106,7 @@
"tmp/foo.1", "tmp/README", "tmp/LICENSE", "tmp/quux"]
def __seed_ta_dir(self, certs, dest_dir=None):
- if isinstance(certs, basestring):
+ if isinstance(certs, six.string_types):
certs = [certs]
if not dest_dir:
dest_dir = self.ta_dir
--- a/src/tests/cli/t_pkg_depotd.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_depotd.py Wed Jul 01 16:20:01 2015 -0700
@@ -30,15 +30,17 @@
import pkg5unittest
import datetime
-import httplib
import os
import shutil
+import six
import tempfile
import time
import unittest
-import urllib
-import urllib2
-import urlparse
+
+from six.moves import http_client
+from six.moves.urllib.error import HTTPError, URLError
+from six.moves.urllib.parse import quote, urljoin
+from six.moves.urllib.request import urlopen
import pkg.client.publisher as publisher
import pkg.depotcontroller as dc
@@ -204,8 +206,8 @@
operation doesn't fail."""
depot_url = self.dc.get_depot_url()
plist = self.pkgsend_bulk(depot_url, self.info10)
- repourl = urlparse.urljoin(depot_url, "info/0/{0}".format(plist[0]))
- urllib2.urlopen(repourl)
+ repourl = urljoin(depot_url, "info/0/{0}".format(plist[0]))
+ urlopen(repourl)
def test_bug_3739(self):
"""Verify that a depot will return a 400 (Bad Request) error
@@ -217,10 +219,10 @@
for entry in ("BRCMbnx", "BRCMbnx%40a",
"BRCMbnx%400.5.11%2C5.11-0.101%3A20081119T231649a"):
try:
- urllib2.urlopen("{0}/{1}/0/{2}".format(durl,
+ urlopen("{0}/{1}/0/{2}".format(durl,
operation, entry))
- except urllib2.HTTPError as e:
- if e.code != httplib.BAD_REQUEST:
+ except HTTPError as e:
+ if e.code != http_client.BAD_REQUEST:
raise
def test_bug_5366(self):
@@ -230,18 +232,18 @@
depot_url = self.dc.get_depot_url()
plist = self.pkgsend_bulk(depot_url, self.system10)
# First, try it un-encoded.
- repourl = urlparse.urljoin(depot_url, "info/0/{0}".format(plist[0]))
- urllib2.urlopen(repourl)
- repourl = urlparse.urljoin(depot_url, "manifest/0/{0}".format(
+ repourl = urljoin(depot_url, "info/0/{0}".format(plist[0]))
+ urlopen(repourl)
+ repourl = urljoin(depot_url, "manifest/0/{0}".format(
plist[0]))
- urllib2.urlopen(repourl)
+ urlopen(repourl)
# Second, try it encoded.
- repourl = urlparse.urljoin(depot_url, "info/0/{0}".format(
- urllib.quote(plist[0])))
- urllib2.urlopen(repourl)
- repourl = urlparse.urljoin(depot_url, "manifest/0/{0}".format(
- urllib.quote(plist[0])))
- urllib2.urlopen(repourl)
+ repourl = urljoin(depot_url, "info/0/{0}".format(
+ quote(plist[0])))
+ urlopen(repourl)
+ repourl = urljoin(depot_url, "manifest/0/{0}".format(
+ quote(plist[0])))
+ urlopen(repourl)
def test_info(self):
"""Testing information showed in /info/0."""
@@ -249,8 +251,8 @@
depot_url = self.dc.get_depot_url();
plist = self.pkgsend_bulk(depot_url, self.info20)
- openurl = urlparse.urljoin(depot_url, "info/0/{0}".format(plist[0]))
- content = urllib2.urlopen(openurl).read()
+ openurl = urljoin(depot_url, "info/0/{0}".format(plist[0]))
+ content = urlopen(openurl).read()
# Get text from content.
lines = content.splitlines()
info_dic = {}
@@ -279,8 +281,8 @@
info_dic[attr] = ""
# Read manifest.
- openurl = urlparse.urljoin(depot_url, "manifest/0/{0}".format(plist[0]))
- content = urllib2.urlopen(openurl).read()
+ openurl = urljoin(depot_url, "manifest/0/{0}".format(plist[0]))
+ content = urlopen(openurl).read()
manifest = man.Manifest()
manifest.set_content(content=content)
fmri_content = manifest.get("pkg.fmri", "")
@@ -347,12 +349,12 @@
# any attempts to go outside that directory should fail
# with a 404 error.
try:
- urllib2.urlopen("{0}/../../../../bin/pkg".format(depot_url))
- except urllib2.HTTPError as e:
- if e.code != httplib.NOT_FOUND:
+ urlopen("{0}/../../../../bin/pkg".format(depot_url))
+ except HTTPError as e:
+ if e.code != http_client.NOT_FOUND:
raise
- f = urllib2.urlopen("{0}/robots.txt".format(depot_url))
+ f = urlopen("{0}/robots.txt".format(depot_url))
self.assert_(len(f.read()))
f.close()
@@ -475,11 +477,11 @@
self.pkgsend_bulk(depot_url, self.foo10)
self.pkgsend_bulk(depot_url, self.entire10)
- repourl = urlparse.urljoin(depot_url,
+ repourl = urljoin(depot_url,
"/en/catalog.shtml?version={0}&action=Browse".format(
- urllib.quote("[email protected],5.11-0")))
+ quote("[email protected],5.11-0")))
- res = urllib2.urlopen(repourl)
+ res = urlopen(repourl)
class TestDepotController(pkg5unittest.CliTestCase):
@@ -575,10 +577,10 @@
if pub:
pub = "{0}/".format(pub)
try:
- urllib2.urlopen("{0}{1}/feed".format(durl,
+ urlopen("{0}{1}/feed".format(durl,
pub))
got = True
- except urllib2.HTTPError as e:
+ except HTTPError as e:
self.debug(str(e))
time.sleep(1)
self.assert_(got)
@@ -672,9 +674,9 @@
self.__dc.start()
durl = self.__dc.get_depot_url()
try:
- urllib2.urlopen("{0}/catalog/1/".format(durl))
- except urllib2.HTTPError as e:
- self.assertEqual(e.code, httplib.NOT_FOUND)
+ urlopen("{0}/catalog/1/".format(durl))
+ except HTTPError as e:
+ self.assertEqual(e.code, http_client.NOT_FOUND)
self.__dc.stop()
# For this disabled case, all /catalog/ operations should return
@@ -685,9 +687,9 @@
durl = self.__dc.get_depot_url()
for ver in (0, 1):
try:
- urllib2.urlopen("{0}/catalog/{1:d}/".format(durl, ver))
- except urllib2.HTTPError as e:
- self.assertEqual(e.code, httplib.NOT_FOUND)
+ urlopen("{0}/catalog/{1:d}/".format(durl, ver))
+ except HTTPError as e:
+ self.assertEqual(e.code, http_client.NOT_FOUND)
self.__dc.stop()
# In the normal case, /catalog/1/ should return
@@ -696,9 +698,9 @@
self.__dc.start()
durl = self.__dc.get_depot_url()
try:
- urllib2.urlopen("{0}/catalog/1/".format(durl))
- except urllib2.HTTPError as e:
- self.assertEqual(e.code, httplib.FORBIDDEN)
+ urlopen("{0}/catalog/1/".format(durl))
+ except HTTPError as e:
+ self.assertEqual(e.code, http_client.FORBIDDEN)
self.__dc.stop()
# A bogus operation should prevent the depot from starting.
@@ -827,10 +829,10 @@
"unexpectedly")
try:
- f = urllib2.urlopen(durl)
+ f = urlopen(durl)
daemon_started = True
break
- except urllib2.URLError as e:
+ except URLError as e:
time.sleep(check_interval)
if not daemon_started:
@@ -920,7 +922,7 @@
for path in pages:
# Any error responses will cause an
# exception.
- response = urllib2.urlopen(
+ response = urlopen(
"{0}/{1}".format(durl, path))
fd, fpath = tempfile.mkstemp(
@@ -977,7 +979,7 @@
pub_repo = publisher.Repository()
pub.repository = pub_repo
- for attr, val in self.pub_repo_cfg.iteritems():
+ for attr, val in six.iteritems(self.pub_repo_cfg):
setattr(pub_repo, attr, val)
repo.update_publisher(pub)
@@ -992,7 +994,7 @@
self.dc.start()
durl = self.dc.get_depot_url()
- purl = urlparse.urljoin(durl, "publisher/0")
+ purl = urljoin(durl, "publisher/0")
entries = p5i.parse(location=purl)
assert entries[0][0].prefix == "test"
assert entries[1][0].prefix == "org.opensolaris.pending"
@@ -1005,7 +1007,7 @@
cfgdata["publisher"][prop])
repo = pub.repository
- for prop, expected in self.pub_repo_cfg.iteritems():
+ for prop, expected in six.iteritems(self.pub_repo_cfg):
returned = getattr(repo, prop)
if prop.endswith("uris") or prop == "origins":
uris = []
@@ -1036,7 +1038,7 @@
# and then verify that the parsed response has the expected
# package information under the expected publisher.
for p in plist:
- purl = urlparse.urljoin(durl, "p5i/0/{0}".format(p))
+ purl = urljoin(durl, "p5i/0/{0}".format(p))
pub, pkglist = p5i.parse(location=purl)[0]
# p5i files contain non-qualified FMRIs as the FMRIs
@@ -1048,23 +1050,23 @@
# Try again, but only using package stems.
for p in plist:
stem = fmri.PkgFmri(p).pkg_name
- purl = urlparse.urljoin(durl, "p5i/0/{0}".format(stem))
+ purl = urljoin(durl, "p5i/0/{0}".format(stem))
pub, pkglist = p5i.parse(location=purl)[0]
self.assertEqual(pkglist, [stem])
# Try again, but using wildcards (which will return a list of
# matching package stems).
- purl = urlparse.urljoin(durl, "p5i/0/zfs*")
+ purl = urljoin(durl, "p5i/0/zfs*")
pub, pkglist = p5i.parse(location=purl)[0]
self.assertEqual(pkglist, ["zfs-extras", "zfs/utils"])
# Finally, verify that a non-existent package will error out
# with a httplib.NOT_FOUND.
try:
- urllib2.urlopen(urlparse.urljoin(durl,
+ urlopen(urljoin(durl,
"p5i/0/nosuchpackage"))
- except urllib2.HTTPError as e:
- if e.code != httplib.NOT_FOUND:
+ except HTTPError as e:
+ if e.code != http_client.NOT_FOUND:
raise
def test_3_headers(self):
@@ -1087,11 +1089,11 @@
def get_headers(req_path):
try:
- rinfo = urllib2.urlopen(urlparse.urljoin(durl,
+ rinfo = urlopen(urljoin(durl,
req_path)).info()
- return rinfo.items()
- except urllib2.HTTPError as e:
- return e.info().items()
+ return list(rinfo.items())
+ except HTTPError as e:
+ return list(e.info().items())
except Exception as e:
raise RuntimeError("retrieval of {0} "
"failed: {1}".format(req_path, str(e)))
@@ -1142,16 +1144,16 @@
durl = self.dc.get_depot_url()
self.pkgsend_bulk(durl, self.quux10, refresh_index=True)
- surl = urlparse.urljoin(durl,
+ surl = urljoin(durl,
"en/search.shtml?action=Search&token=*")
- urllib2.urlopen(surl).read()
- surl = urlparse.urljoin(durl,
+ urlopen(surl).read()
+ surl = urljoin(durl,
"en/advanced_search.shtml?action=Search&token=*")
- urllib2.urlopen(surl).read()
- surl = urlparse.urljoin(durl,
+ urlopen(surl).read()
+ surl = urljoin(durl,
"en/advanced_search.shtml?token=*&show=a&rpp=50&"
"action=Advanced+Search")
- urllib2.urlopen(surl).read()
+ urlopen(surl).read()
def test_address(self):
"""Verify that depot address can be set."""
@@ -1166,7 +1168,7 @@
# Check that we can retrieve something.
durl = self.dc.get_depot_url()
- verdata = urllib2.urlopen("{0}/versions/0/".format(durl))
+ verdata = urlopen("{0}/versions/0/".format(durl))
def test_log_depot_daemon(self):
"""Verify that depot daemon works properly and the error
@@ -1191,8 +1193,8 @@
# the error msg.
durl = "http://localhost:{0}/catalog/0".format(curport)
try:
- urllib2.urlopen(durl)
- except urllib2.URLError as e:
+ urlopen(durl)
+ except URLError as e:
pass
# Stop the depot daemon.
self.__depot_daemon_stop(depot_handle)
--- a/src/tests/cli/t_pkg_freeze.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_freeze.py Wed Jul 01 16:20:01 2015 -0700
@@ -239,7 +239,7 @@
self.pkg("freeze", su_wrap=True)
# Test that we don't stack trace if the version is unexpected.
- version, d = json.load(file(pth))
+ version, d = json.load(open(pth))
with open(pth, "wb") as fh:
json.dump((-1, d), fh)
self.pkg("freeze", exit=1)
--- a/src/tests/cli/t_pkg_help.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_help.py Wed Jul 01 16:20:01 2015 -0700
@@ -30,6 +30,7 @@
import codecs
import os
import re
+import six
import unittest
class TestPkgHelp(pkg5unittest.CliTestCase):
@@ -122,7 +123,7 @@
locale_env = { "LC_ALL": "ja_JP.eucJP" }
ret, out, err = self.pkg("help -v", env_arg=locale_env,
out=True, stderr=True)
- cmd_out = unicode(err, encoding="eucJP")
+ cmd_out = six.text_type(err, encoding="eucJP")
# Take only 4 lines from "pkg --help" command output.
u_out = cmd_out.splitlines()[:4]
--- a/src/tests/cli/t_pkg_history.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_history.py Wed Jul 01 16:20:01 2015 -0700
@@ -34,6 +34,7 @@
import random
import re
import shutil
+import six
import subprocess
import time
import unittest
@@ -321,7 +322,7 @@
# Make sure we have a nice number of entries with which to
# experiment.
#
- for i in xrange(5):
+ for i in range(5):
self.pkg("install pkg{0:d}".format(i), exit=1)
self.pkg("history -Hn 3")
self.assertEqual(len(self.output.splitlines()), 3)
@@ -339,7 +340,7 @@
self.assertEqual(len(self.output.splitlines()), count)
def test_10_history_columns(self):
- """Verify the -o option """
+ """Verify the -o option """
self.pkg("history -H -n 1")
# START OPERATION CLIENT OUTCOME
@@ -384,7 +385,7 @@
events[timestamp] = [operation]
# verify we can retrieve each event
- for timestamp in events:
+ for timestamp in events:
operations = set(events[timestamp])
self.pkg("history -H -t {0} -o operation".format(timestamp))
arr = self.output.splitlines()
@@ -394,7 +395,7 @@
self.assert_(found == operations,
"{0} does not equal {1} for {2}".format(
found, operations, timestamp))
-
+
# record timestamp and expected result for 3 random,
# unique timestamps. Since each timestamp can result in
# multiple events, we need to calculate how many events to
@@ -416,7 +417,7 @@
self.assert_(len(output) == expected_count,
"Expected {0} events, got {1}".format(expected_count,
len(output)))
-
+
for line in output:
fields = line.split()
timestamp = fields[0].strip()
@@ -475,7 +476,7 @@
"{0} does not equal {1}".format(single_entry_output, self.output))
# verify a random range taken from the history is correct
- timestamps = entries.keys()
+ timestamps = list(entries.keys())
timestamps.sort()
# get two random indices from our list of timestamps
@@ -507,7 +508,7 @@
for line in range_lines:
ts = line.strip().split()[0]
self.assert_(line in entries[ts],
- "{0} does not appear in {1}".format(line, entries[ts]))
+ "{0} does not appear in {1}".format(line, entries[ts]))
range_timestamps.append(ts)
# determine the reverse. That is, for each entry in the
@@ -561,7 +562,7 @@
operation.attrib["end_time"] = "20120229T000000Z"
new_file = re.sub(".xml", "99.xml", latest)
- outfile = file(os.path.join(history_dir, new_file), "w")
+ outfile = open(os.path.join(history_dir, new_file), "w")
outfile.write(xml.etree.ElementTree.tostring(root))
outfile.close()
--- a/src/tests/cli/t_pkg_image_create.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_image_create.py Wed Jul 01 16:20:01 2015 -0700
@@ -37,6 +37,7 @@
import pkg.config as cfg
import pkg.misc as misc
import shutil
+import six
import unittest
@@ -110,11 +111,11 @@
return "{0}/pkg/{1}/installed".format(imgdir,
fmri.get_dir_path())
- f = file(install_file(fmri), "w")
+ f = open(install_file(fmri), "w")
f.writelines(["VERSION_1\n_PRE_", fmri.publisher])
f.close()
- fi = file("{0}/state/installed/{1}".format(imgdir,
+ fi = open("{0}/state/installed/{1}".format(imgdir,
fmri.get_link_path()), "w")
fi.close()
@@ -193,7 +194,7 @@
user_provided_dir=True, cmdpath=cmdpath)
pub = img.get_publisher(prefix=prefix)
for section in pub_cfg:
- for prop, val in pub_cfg[section].iteritems():
+ for prop, val in six.iteritems(pub_cfg[section]):
if section == "publisher":
pub_val = getattr(pub, prop)
else:
--- a/src/tests/cli/t_pkg_install.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_install.py Wed Jul 01 16:20:01 2015 -0700
@@ -42,7 +42,9 @@
import tempfile
import time
import unittest
-import urllib2
+from six.moves import range
+from six.moves.urllib.parse import quote
+from six.moves.urllib.request import urlopen, build_opener, ProxyHandler, Request
import pkg.actions
import pkg.digest as digest
@@ -1247,27 +1249,27 @@
# format pkg(1) uses - two logically identical urls that
# differ only by the way they're quoted are treated by
# Apache as separate cacheable resources.
- "{0}/test1/manifest/0/foo@{1}".format(self.durl1, urllib2.quote(
+ "{0}/test1/manifest/0/foo@{1}".format(self.durl1, quote(
foo_version)),
"{0}/test1/file/1/8535c15c49cbe1e7cb1a0bf8ff87e512abed66f8".format(
self.durl1),
]
- proxy_handler = urllib2.ProxyHandler({"http": sysrepo_url})
- proxy_opener = urllib2.build_opener(proxy_handler)
+ proxy_handler = ProxyHandler({"http": sysrepo_url})
+ proxy_opener = build_opener(proxy_handler)
# validate that our cache is returning corrupt urls.
for url in urls:
self.debug("url:{0}".format(url))
# we should get clean content when we don't use the
# cache
- u = urllib2.urlopen(url)
+ u = urlopen(url)
content = u.readlines()
self.assert_(content != ["noodles\n"],
"Unexpected content from depot")
# get the corrupted version, and verify it is broken
- req = urllib2.Request(url)
+ req = Request(url)
u = proxy_opener.open(req)
content = u.readlines()
@@ -1284,7 +1286,7 @@
# since the cache has been refreshed, we should see valid
# contents when going through the proxy now.
for url in urls:
- req = urllib2.Request(url)
+ req = Request(url)
u = proxy_opener.open(req)
content = u.readlines()
self.assert_(content != ["noodles\n"],
@@ -1307,7 +1309,7 @@
"{0}/file/85/8535c15c49cbe1e7cb1a0bf8ff87e512abed66f8".format(
prefix))
mfpath = os.path.join(repodir, "{0}/pkg/foo/{1}".format(prefix,
- urllib2.quote(foo_version)))
+ quote(foo_version)))
catpath = os.path.join(repodir, "{0}/catalog/catalog.base.C".format(
prefix))
@@ -3149,7 +3151,7 @@
# make local changes to the user
pwdpath = os.path.join(self.get_img_path(), "etc/passwd")
- pwdfile = file(pwdpath, "r+")
+ pwdfile = open(pwdpath, "r+")
lines = pwdfile.readlines()
for i, l in enumerate(lines):
if l.startswith("Kermit"):
@@ -3250,7 +3252,7 @@
self.pkg("{0} dricon@1".format(install_cmd))
# This one should comment out the wigit entry in driver_aliases
self.pkg("{0} dricon@2".format(install_cmd))
- da_contents = file(os.path.join(self.get_img_path(),
+ da_contents = open(os.path.join(self.get_img_path(),
"etc/driver_aliases")).readlines()
self.assert_("# pkg(5): wigit \"pci8086,1234\"\n" in da_contents)
self.assert_("wigit \"pci8086,1234\"\n" not in da_contents)
@@ -3280,7 +3282,7 @@
# Check that there is a policy entry for this
# device in /etc/security/device_policy
- dp_contents = file(os.path.join(self.get_img_path(),
+ dp_contents = open(os.path.join(self.get_img_path(),
"etc/security/device_policy")).readlines()
self.assert_("frigit:*\tread_priv_set=net_rawaccess\twrite_priv_set=net_rawaccess\n" in dp_contents)
@@ -3289,7 +3291,7 @@
# Check that there is no longer a policy entry for this
# device in /etc/security/device_policy
- dp_contents = file(os.path.join(self.get_img_path(),
+ dp_contents = open(os.path.join(self.get_img_path(),
"etc/security/device_policy")).readlines()
self.assert_("frigit:*\tread_priv_set=net_rawaccess\twrite_priv_set=net_rawaccess\n" not in dp_contents)
@@ -4671,46 +4673,46 @@
# the file, and that the user verifies.
self.pkg("install notftpuser")
fpath = self.get_img_path() + "/etc/ftpd/ftpusers"
- self.assert_("animal\n" in file(fpath).readlines())
+ self.assert_("animal\n" in open(fpath).readlines())
self.pkg("verify notftpuser")
# Add a user with an explicit ftpuser=true. Make sure the user
# is not added to the file, and that the user verifies.
self.pkg("install ftpuserexp")
- self.assert_("fozzie\n" not in file(fpath).readlines())
+ self.assert_("fozzie\n" not in open(fpath).readlines())
self.pkg("verify ftpuserexp")
# Add a user with an implicit ftpuser=true. Make sure the user
# is not added to the file, and that the user verifies.
self.pkg("install ftpuserimp")
- self.assert_("gonzo\n" not in file(fpath).readlines())
+ self.assert_("gonzo\n" not in open(fpath).readlines())
self.pkg("verify ftpuserimp")
# Put a user into the ftpusers file as shipped, then add that
# user, with ftpuser=false. Make sure the user remains in the
# file, and that the user verifies.
self.pkg("uninstall notftpuser")
- file(fpath, "a").write("animal\n")
+ open(fpath, "a").write("animal\n")
self.pkg("install notftpuser")
- self.assert_("animal\n" in file(fpath).readlines())
+ self.assert_("animal\n" in open(fpath).readlines())
self.pkg("verify notftpuser")
# Put a user into the ftpusers file as shipped, then add that
# user, with an explicit ftpuser=true. Make sure the user is
# stripped from the file, and that the user verifies.
self.pkg("uninstall ftpuserexp")
- file(fpath, "a").write("fozzie\n")
+ open(fpath, "a").write("fozzie\n")
self.pkg("install ftpuserexp")
- self.assert_("fozzie\n" not in file(fpath).readlines())
+ self.assert_("fozzie\n" not in open(fpath).readlines())
self.pkg("verify ftpuserexp")
# Put a user into the ftpusers file as shipped, then add that
# user, with an implicit ftpuser=true. Make sure the user is
# stripped from the file, and that the user verifies.
self.pkg("uninstall ftpuserimp")
- file(fpath, "a").write("gonzo\n")
+ open(fpath, "a").write("gonzo\n")
self.pkg("install ftpuserimp")
- self.assert_("gonzo\n" not in file(fpath).readlines())
+ self.assert_("gonzo\n" not in open(fpath).readlines())
self.pkg("verify ftpuserimp")
def test_groupverify_install(self):
@@ -4734,20 +4736,20 @@
# add additional members to group & verify
gpath = self.get_img_file_path("etc/group")
- gdata = file(gpath).readlines()
+ gdata = open(gpath).readlines()
gdata[-1] = gdata[-1].rstrip() + "kermit,misspiggy\n"
- file(gpath, "w").writelines(gdata)
+ open(gpath, "w").writelines(gdata)
self.pkg("verify simplegroup")
self.pkg("uninstall simplegroup")
# verify that groups appear in gid order.
self.pkg("install simplegroup simplegroup2")
self.pkg("verify")
- gdata = file(gpath).readlines()
+ gdata = open(gpath).readlines()
self.assert_(gdata[-1].find("muppets2") == 0)
self.pkg("uninstall simple*")
self.pkg("install simplegroup2 simplegroup")
- gdata = file(gpath).readlines()
+ gdata = open(gpath).readlines()
self.assert_(gdata[-1].find("muppets2") == 0)
def test_preexisting_group_install(self):
@@ -4766,14 +4768,14 @@
self.pkg("install basics")
gpath = self.get_img_file_path("etc/group")
- gdata = file(gpath).readlines()
+ gdata = open(gpath).readlines()
gdata = ["muppets::1010:\n"] + gdata
- file(gpath, "w").writelines(gdata)
+ open(gpath, "w").writelines(gdata)
self.pkg("verify")
self.pkg("install simplegroup@1")
self.pkg("verify simplegroup")
# check # lines beginning w/ 'muppets' in group file
- gdata = file(gpath).readlines()
+ gdata = open(gpath).readlines()
self.assert_(
len([a for a in gdata if a.find("muppets") == 0]) == 1)
@@ -4820,8 +4822,8 @@
self.pkg("verify")
# edit group file to remove muppets group
gpath = self.get_img_file_path("etc/group")
- gdata = file(gpath).readlines()
- file(gpath, "w").writelines(gdata[0:-1])
+ gdata = open(gpath).readlines()
+ open(gpath, "w").writelines(gdata[0:-1])
# verify that we catch missing group
# in both group and user actions
@@ -4832,7 +4834,7 @@
self.pkg("uninstall missing*")
# try installing w/ broken group
- file(gpath, "w").writelines(gdata[0:-1])
+ open(gpath, "w").writelines(gdata[0:-1])
self.pkg("install missing_group@1", 1)
self.pkg("fix muppetsgroup")
self.pkg("install missing_group@1")
@@ -4863,16 +4865,16 @@
self.pkg("verify simpleuser")
ppath = self.get_img_path() + "/etc/passwd"
- pdata = file(ppath).readlines()
+ pdata = open(ppath).readlines()
spath = self.get_img_path() + "/etc/shadow"
- sdata = file(spath).readlines()
+ sdata = open(spath).readlines()
def finderr(err):
self.assert_("\t\tERROR: " + err in self.output)
# change a provided, empty-default field to something else
pdata[-1] = "misspiggy:x:5:0:& loves Kermie:/:/bin/zsh"
- file(ppath, "w").writelines(pdata)
+ open(ppath, "w").writelines(pdata)
self.pkg("verify simpleuser", exit=1)
finderr("login-shell: '/bin/zsh' should be '/bin/sh'")
self.pkg("fix simpleuser")
@@ -4880,7 +4882,7 @@
# change a provided, non-empty-default field to the default
pdata[-1] = "misspiggy:x:5:0:& User:/:/bin/sh"
- file(ppath, "w").writelines(pdata)
+ open(ppath, "w").writelines(pdata)
self.pkg("verify simpleuser", exit=1)
finderr("gcos-field: '& User' should be '& loves Kermie'")
self.pkg("fix simpleuser")
@@ -4889,7 +4891,7 @@
# change a non-provided, non-empty-default field to something
# other than the default
pdata[-1] = "misspiggy:x:5:0:& loves Kermie:/misspiggy:/bin/sh"
- file(ppath, "w").writelines(pdata)
+ open(ppath, "w").writelines(pdata)
self.pkg("verify simpleuser", exit=1)
finderr("home-dir: '/misspiggy' should be '/'")
self.pkg("fix simpleuser")
@@ -4898,10 +4900,10 @@
# add a non-provided, empty-default field
pdata[-1] = "misspiggy:x:5:0:& loves Kermie:/:/bin/sh"
sdata[-1] = "misspiggy:*LK*:14579:7:::::"
- file(ppath, "w").writelines(pdata)
+ open(ppath, "w").writelines(pdata)
os.chmod(spath,
stat.S_IMODE(os.stat(spath).st_mode)|stat.S_IWUSR)
- file(spath, "w").writelines(sdata)
+ open(spath, "w").writelines(sdata)
self.pkg("verify simpleuser", exit=1)
finderr("min: '7' should be '<empty>'")
# fails fix since we don't repair shadow entries on purpose
@@ -4912,8 +4914,8 @@
# remove a non-provided, non-empty-default field
pdata[-1] = "misspiggy:x:5:0:& loves Kermie::/bin/sh"
sdata[-1] = "misspiggy:*LK*:14579::::::"
- file(ppath, "w").writelines(pdata)
- file(spath, "w").writelines(sdata)
+ open(ppath, "w").writelines(pdata)
+ open(spath, "w").writelines(sdata)
self.pkg("verify simpleuser", exit=1)
finderr("home-dir: '' should be '/'")
self.pkg("fix simpleuser")
@@ -4921,7 +4923,7 @@
# remove a provided, non-empty-default field
pdata[-1] = "misspiggy:x:5:0::/:/bin/sh"
- file(ppath, "w").writelines(pdata)
+ open(ppath, "w").writelines(pdata)
self.pkg("verify simpleuser", exit=1)
finderr("gcos-field: '' should be '& loves Kermie'")
self.pkg("fix simpleuser")
@@ -4929,7 +4931,7 @@
# remove a provided, empty-default field
pdata[-1] = "misspiggy:x:5:0:& loves Kermie:/:"
- file(ppath, "w").writelines(pdata)
+ open(ppath, "w").writelines(pdata)
self.pkg("verify simpleuser", exit=1)
finderr("login-shell: '' should be '/bin/sh'")
self.pkg("fix simpleuser")
@@ -4937,7 +4939,7 @@
# remove the user from /etc/passwd
pdata[-1] = "misswiggy:x:5:0:& loves Kermie:/:"
- file(ppath, "w").writelines(pdata)
+ open(ppath, "w").writelines(pdata)
self.pkg("verify simpleuser", exit=1)
finderr("login-shell: '<missing>' should be '/bin/sh'")
finderr("gcos-field: '<missing>' should be '& loves Kermie'")
@@ -4948,8 +4950,8 @@
# remove the user completely
pdata[-1] = "misswiggy:x:5:0:& loves Kermie:/:"
sdata[-1] = "misswiggy:*LK*:14579::::::"
- file(ppath, "w").writelines(pdata)
- file(spath, "w").writelines(sdata)
+ open(ppath, "w").writelines(pdata)
+ open(spath, "w").writelines(sdata)
self.pkg("verify simpleuser", exit=1)
finderr("username: '<missing>' should be 'misspiggy'")
self.pkg("fix simpleuser")
@@ -4958,7 +4960,7 @@
# change the password and show an error
self.pkg("verify simpleuser")
sdata[-1] = "misspiggy:NP:14579::::::"
- file(spath, "w").writelines(sdata)
+ open(spath, "w").writelines(sdata)
self.pkg("verify simpleuser", exit=1)
finderr("password: 'NP' should be '*LK*'")
self.pkg("fix simpleuser")
@@ -4969,10 +4971,10 @@
# do not cause verify errors if changed.
self.pkg("install --reject simpleuser simpleuser2@1")
self.pkg("verify simpleuser2")
- pdata = file(ppath).readlines()
- sdata = file(spath).readlines()
+ pdata = open(ppath).readlines()
+ sdata = open(spath).readlines()
sdata[-1] = "kermit:$5$pWPEsjm2$GXjBRTjGeeWmJ81ytw3q1ah7QTaI7yJeRYZeyvB.Rp1:14579::::::"
- file(spath, "w").writelines(sdata)
+ open(spath, "w").writelines(sdata)
self.pkg("verify simpleuser2")
# verify that upgrading package to version that implicitly
@@ -4980,7 +4982,7 @@
# verifies correctly
self.pkg("update simpleuser2@2")
self.pkg("verify simpleuser2")
- sdata = file(spath).readlines()
+ sdata = open(spath).readlines()
sdata[-1].index("*LK*")
# ascertain that users are added in uid order when
@@ -4988,13 +4990,13 @@
self.pkg("uninstall simpleuser2")
self.pkg("install simpleuser simpleuser2")
- pdata = file(ppath).readlines()
+ pdata = open(ppath).readlines()
pdata[-1].index("kermit")
self.pkg("uninstall simpleuser simpleuser2")
self.pkg("install simpleuser2 simpleuser")
- pdata = file(ppath).readlines()
+ pdata = open(ppath).readlines()
pdata[-1].index("kermit")
def test_minugid(self):
@@ -5014,13 +5016,13 @@
self.pkg("install ugidtest")
else:
self.pkg("exact-install basics ugidtest")
- passwd_file = file(os.path.join(self.get_img_path(),
+ passwd_file = open(os.path.join(self.get_img_path(),
"/etc/passwd"))
for line in passwd_file:
if line.startswith("dummy"):
self.assert_(line.startswith("dummy:x:5:"))
passwd_file.close()
- group_file = file(os.path.join(self.get_img_path(),
+ group_file = open(os.path.join(self.get_img_path(),
"/etc/group"))
for line in group_file:
if line.startswith("dummy"):
@@ -5065,7 +5067,7 @@
(3, ":::::::::"), (100, "")):
garbage += "\n"
self.pkg("install basics")
- with file(pwd_path, "r+") as pwd_file:
+ with open(pwd_path, "r+") as pwd_file:
lines = pwd_file.readlines()
lines[lineno:lineno] = garbage
pwd_file.truncate(0)
@@ -5075,11 +5077,11 @@
self.pkg("{0} singleuser".format(install_cmd))
else:
self.pkg("{0} basics singleuser".format(install_cmd))
- with file(pwd_path) as pwd_file:
+ with open(pwd_path) as pwd_file:
lines = pwd_file.readlines()
self.assert_(garbage in lines)
self.pkg("uninstall singleuser")
- with file(pwd_path) as pwd_file:
+ with open(pwd_path) as pwd_file:
lines = pwd_file.readlines()
self.assert_(garbage in lines)
@@ -5097,7 +5099,7 @@
self.image_create(self.rurl)
self.pkg("install [email protected]")
group_path = os.path.join(self.get_img_path(), "etc/group")
- with file(group_path, "r+") as group_file:
+ with open(group_path, "r+") as group_file:
lines = group_file.readlines()
lines[0] = lines[0][:-1] + "Kermit" + "\n"
group_file.truncate(0)
@@ -5162,14 +5164,14 @@
self.image_create(self.rurl)
def readfile():
- dlf = file(os.path.join(self.get_img_path(),
+ dlf = open(os.path.join(self.get_img_path(),
"etc/devlink.tab"))
dllines = dlf.readlines()
dlf.close()
return dllines
def writefile(dllines):
- dlf = file(os.path.join(self.get_img_path(),
+ dlf = open(os.path.join(self.get_img_path(),
"etc/devlink.tab"), "w")
dlf.writelines(dllines)
dlf.close()
@@ -9467,7 +9469,7 @@
if objname.startswith("pkg_") and type(obj) == str:
pkgs.append(obj)
- for i in xrange(20):
+ for i in range(20):
s = """
open massivedupdir{0:d}@0,5.11-0
add dir path=usr owner=root group={{0}} mode={{1}} zig={{2}}
@@ -10337,7 +10339,7 @@
self.pkg("uninstall '*'")
self.pkg("install dupdirp1 dupdirp2@1 dupdirp3 dupdirp4", exit=1)
- pkgs = " ".join("massivedupdir{0:d}".format(x) for x in xrange(20))
+ pkgs = " ".join("massivedupdir{0:d}".format(x) for x in range(20))
self.pkg("install {0}".format(pkgs), exit=1)
# Trigger bug 17943: we install packages with conflicts in two
--- a/src/tests/cli/t_pkg_linked.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_linked.py Wed Jul 01 16:20:01 2015 -0700
@@ -38,6 +38,7 @@
import itertools
import re
import shutil
+import six
import tempfile
import unittest
import sys
@@ -4133,7 +4134,7 @@
self.i_path.insert(i, self.img_path(i))
def __mk_bin(self, path, txt):
- with file(path, "w+") as fobj:
+ with open(path, "w+") as fobj:
print(txt, file=fobj)
self.cmdline_run("chmod a+x {0}".format(path), coverage=False)
@@ -4304,7 +4305,7 @@
ipath, liname, outfile1))
self.__ccmd("cat {0}".format(outfile1))
- for p, v in props.iteritems():
+ for p, v in six.iteritems(props):
if v is None:
# verify property is not present
self.__ccmd(
--- a/src/tests/cli/t_pkg_mediated.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_mediated.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,6 +33,7 @@
import pkg.misc as misc
import pkg.p5p
import shutil
+import six
import stat
import tempfile
import unittest
@@ -277,7 +278,7 @@
getattr(self, p)
for p in dir(self)
if p.startswith("pkg_") and isinstance(getattr(self, p),
- basestring)
+ six.string_types)
])
def __assert_mediation_matches(self, expected, mediators=misc.EmptyI):
--- a/src/tests/cli/t_pkg_publisher.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_publisher.py Wed Jul 01 16:20:01 2015 -0700
@@ -34,6 +34,7 @@
import pkg.client.image as image
import pkg.misc
import shutil
+import six
import tempfile
import unittest
@@ -813,7 +814,7 @@
user_provided_dir=True, cmdpath=cmdpath)
pub = img.get_publisher(prefix=prefix)
for section in pub_cfg:
- for prop, val in pub_cfg[section].iteritems():
+ for prop, val in six.iteritems(pub_cfg[section]):
if section == "publisher":
pub_val = getattr(pub, prop)
else:
@@ -847,7 +848,7 @@
rpath = dc.get_repodir()
props = ""
for sname in pubcfg:
- for pname, pval in pubcfg[sname].iteritems():
+ for pname, pval in six.iteritems(pubcfg[sname]):
if sname == "publisher" and pname == "prefix":
continue
pname = pname.replace("_", "-")
--- a/src/tests/cli/t_pkg_refresh.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_refresh.py Wed Jul 01 16:20:01 2015 -0700
@@ -391,7 +391,7 @@
# and then get the list of updates files it has created.
repo = dc.get_repo()
v1_cat = repo.get_catalog("test1")
- update = v1_cat.updates.keys()[-1]
+ update = list(v1_cat.updates.keys())[-1]
# All of the entries from the previous operations, and then
# entries for the catalog attrs file, and one catalog update
@@ -464,7 +464,7 @@
# Refresh to get an incremental update, and verify it worked.
self.pkg("refresh")
- update = v1_cat.updates.keys()[-1]
+ update = list(v1_cat.updates.keys())[-1]
expected += [
"/catalog/1/catalog.attrs",
"/catalog/1/{0}".format(update)
@@ -508,7 +508,7 @@
# Refresh to get an incremental update, and verify it worked.
self.pkg("refresh")
- update = v1_cat.updates.keys()[-1]
+ update = list(v1_cat.updates.keys())[-1]
expected += [
"/catalog/1/catalog.attrs",
"/catalog/1/{0}".format(update)
@@ -529,7 +529,7 @@
self.pkgsend_bulk(self.durl1, self.foo12)
repo = dc.get_repo()
v1_cat = repo.get_catalog("test1")
- update = v1_cat.updates.keys()[-1]
+ update = list(v1_cat.updates.keys())[-1]
# Now verify that a refresh induces a full retrieval. The
# catalog.attrs file will be retrieved twice due to the
--- a/src/tests/cli/t_pkg_search.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_search.py Wed Jul 01 16:20:01 2015 -0700
@@ -36,7 +36,8 @@
import shutil
import sys
import unittest
-import urllib2
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import urlopen
import pkg.catalog as catalog
import pkg.client.pkgdefs as pkgdefs
@@ -486,7 +487,7 @@
self.assert_(self.errout == "" )
self.pkg("search -a -r {0}".format(self.large_query), exit=1)
- self.assert_(self.errout != "")
+ self.assert_(self.errout != "")
def _run_local_tests(self):
outfile = os.path.join(self.test_root, "res")
@@ -563,7 +564,7 @@
self._search_op(False, "pkg:/example_path", self.res_local_path)
self.pkg("search -a -l {0}".format(self.large_query), exit=1)
- self.assert_(self.errout != "")
+ self.assert_(self.errout != "")
def _run_local_empty_tests(self):
self.pkg("search -a -l example_pkg", exit=1)
@@ -817,16 +818,16 @@
self.pkg("search -r 'Intel(R)'", exit=1)
self.pkg("search -r 'foo AND <bar>'", exit=1)
- urllib2.urlopen("{0}/en/search.shtml?token=foo+AND+<bar>&"
+ urlopen("{0}/en/search.shtml?token=foo+AND+<bar>&"
"action=Search".format(durl))
- urllib2.urlopen("{0}/en/search.shtml?token=Intel(R)&"
+ urlopen("{0}/en/search.shtml?token=Intel(R)&"
"action=Search".format(durl))
- pkg5unittest.eval_assert_raises(urllib2.HTTPError,
- lambda x: x.code == 400, urllib2.urlopen,
+ pkg5unittest.eval_assert_raises(HTTPError,
+ lambda x: x.code == 400, urlopen,
"{0}/search/1/False_2_None_None_Intel%28R%29".format(durl))
- pkg5unittest.eval_assert_raises(urllib2.HTTPError,
- lambda x: x.code == 400, urllib2.urlopen,
+ pkg5unittest.eval_assert_raises(HTTPError,
+ lambda x: x.code == 400, urlopen,
"{0}/search/1/False_2_None_None_foo%20%3Cbar%3E".format(durl))
def test_bug_10515(self):
@@ -894,12 +895,12 @@
# together. If this changes in the future, because of parallel
# indexing or parallel searching for example, it's possible
# this test will need to be removed or reexamined.
-
+
durl = self.dc.get_depot_url()
self.pkgsend_bulk(durl, self.dup_lines_pkg10)
self.image_create(durl)
-
+
self.pkg("search -a 'dup_lines:set:pkg.fmri:'")
self.assertEqual(len(self.output.splitlines()), 2)
@@ -1033,7 +1034,7 @@
add dir mode=0755 owner=root group=bin path=/bin
add file tmp/example_file mode=0555 owner=root group=bin path=/bin/example_path12
close """
-
+
incorp_pkg11 = """
open pkg://test1/[email protected],5.11-0
add depend fmri=pkg://test1/[email protected],5.11-0 type=incorporate
@@ -1044,7 +1045,7 @@
add dir mode=0755 owner=root group=bin path=/bin
add file tmp/example_file mode=0555 owner=root group=bin path=/bin/example_path12
close """
-
+
incorp_pkg12 = """
open pkg://test2/[email protected],5.11-0
add depend fmri=pkg://test2/[email protected],5.11-0 type=incorporate
@@ -1087,7 +1088,7 @@
def test_16190165(self):
""" Check that pkg search works fine with structured queries
and the scheme name "pkg://" in the query """
-
+
self.pkgsend_bulk(self.durl1, self.example_pkg11, refresh_index=True)
self.pkgsend_bulk(self.durl2, self.example_pkg12, refresh_index=True)
self.pkgsend_bulk(self.durl1, self.incorp_pkg11, refresh_index=True)
--- a/src/tests/cli/t_pkg_sysrepo.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_sysrepo.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,6 +33,7 @@
import copy
import os
import shutil
+import six
import pkg.client.api_errors as apx
import pkg.client.transport.exception as tx
@@ -382,7 +383,7 @@
if not port:
port = self.sysrepo_port
self.__configured_names = []
- if isinstance(names, basestring):
+ if isinstance(names, six.string_types):
names = [names]
for name in names:
props, pcs = self.configs[name]
--- a/src/tests/cli/t_pkg_temp_sources.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_temp_sources.py Wed Jul 01 16:20:01 2015 -0700
@@ -37,6 +37,7 @@
import pkg.misc as misc
import pkg.p5p
import shutil
+import six
import stat
import tempfile
import unittest
@@ -117,7 +118,7 @@
"tmp/foo.1", "tmp/README", "tmp/LICENSE", "tmp/LICENSE2", "tmp/quux"]
def __seed_ta_dir(self, certs, dest_dir=None):
- if isinstance(certs, basestring):
+ if isinstance(certs, six.string_types):
certs = [certs]
if not dest_dir:
dest_dir = self.ta_dir
--- a/src/tests/cli/t_pkg_varcet.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_varcet.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,6 +33,7 @@
import pkg.misc as misc
import pkg.p5p
import shutil
+import six
import stat
import tempfile
import unittest
@@ -93,7 +94,7 @@
getattr(self, p)
for p in dir(self)
if p.startswith("pkg_") and isinstance(getattr(self, p),
- basestring)
+ six.string_types)
])
def __assert_varcet_matches_default(self, cmd, expected, errout=None,
--- a/src/tests/cli/t_pkg_verify.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkg_verify.py Wed Jul 01 16:20:01 2015 -0700
@@ -139,7 +139,7 @@
self.assert_("Unexpected Exception" not in self.output)
self.assert_("PACKAGE" in self.output and "STATUS" in self.output)
- # Test that "-H" works as expected.
+ # Test that "-H" works as expected.
self.pkg_verify("foo -H", exit=1)
self.assert_("PACKAGE" not in self.output and
"STATUS" not in self.output)
@@ -222,7 +222,7 @@
# check that verify is silent on about modified editable files
self.image_create(self.rurl)
self.pkg("install foo")
- fd = file(os.path.join(self.get_img_path(), "etc", "preserved"), "w+")
+ fd = open(os.path.join(self.get_img_path(), "etc", "preserved"), "w+")
fd.write("Bobcats are here")
fd.close()
self.pkg_verify("foo")
--- a/src/tests/cli/t_pkgdep.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkgdep.py Wed Jul 01 16:20:01 2015 -0700
@@ -29,6 +29,7 @@
import itertools
import os
+import six
import subprocess
import sys
import unittest
@@ -1110,7 +1111,7 @@
def check_res(self, expected, seen):
def pick_file(act):
fs = act.attrs[DDP + ".file"]
- if isinstance(fs, basestring):
+ if isinstance(fs, six.string_types):
fs = [fs]
for f in fs:
if f.endswith(".py") and "__init__" not in f:
--- a/src/tests/cli/t_pkgfmt.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkgfmt.py Wed Jul 01 16:20:01 2015 -0700
@@ -1292,11 +1292,11 @@
def setUp(self):
pkg5unittest.CliTestCase.setUp(self)
- with file(os.path.join(self.test_root, "source_file"),
+ with open(os.path.join(self.test_root, "source_file"),
"wb") as f:
f.write(self.pkgcontents)
- with file(os.path.join(self.test_root, "needs_fmt_file"),
+ with open(os.path.join(self.test_root, "needs_fmt_file"),
"wb") as f:
f.write(self.needs_formatting)
--- a/src/tests/cli/t_pkglint.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkglint.py Wed Jul 01 16:20:01 2015 -0700
@@ -574,7 +574,7 @@
cache = tempfile.mkdtemp("pkglint-cache", "",
self.test_root)
path = os.path.join(cache, name)
- f = file(path, "w")
+ f = open(path, "w")
f.close()
self.pkglint("-c {0} -r {1} -l {2}".format(
cache, self.ref_uri, self.lint_uri), exit=2)
--- a/src/tests/cli/t_pkgmerge.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkgmerge.py Wed Jul 01 16:20:01 2015 -0700
@@ -37,8 +37,6 @@
import shutil
import tempfile
import time
-import urllib
-import urlparse
import unittest
import zlib
--- a/src/tests/cli/t_pkgmogrify.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkgmogrify.py Wed Jul 01 16:20:01 2015 -0700
@@ -32,6 +32,7 @@
import os
import re
import shutil
+import six
import stat
import sys
import tempfile
@@ -153,29 +154,29 @@
def setUp(self):
pkg5unittest.CliTestCase.setUp(self)
- f = file(os.path.join(self.test_root, "source_file"), "wb")
+ f = open(os.path.join(self.test_root, "source_file"), "wb")
f.write(self.pkgcontents)
f.close()
- f = file(os.path.join(self.test_root, "source_file2"), "wb")
+ f = open(os.path.join(self.test_root, "source_file2"), "wb")
f.write(self.pkgcontents2)
f.close()
- f = file(os.path.join(self.test_root, "source_file3"), "wb")
+ f = open(os.path.join(self.test_root, "source_file3"), "wb")
f.write(self.pkgcontents3)
f.close()
# Map the transform names to path names
xformpaths = dict((
(name, os.path.join(self.test_root, "transform_{0}".format(i)))
- for i, name in enumerate(self.transforms.iterkeys())
+ for i, name in enumerate(six.iterkeys(self.transforms))
))
# Now that we have path names, we can use the expandos in the
# transform contents to embed those pathnames, and write the
# transform files out.
- for name, path in xformpaths.iteritems():
- f = file(path, "wb")
+ for name, path in six.iteritems(xformpaths):
+ f = open(path, "wb")
self.transforms[name] = self.transforms[name].format(**xformpaths)
f.write(self.transforms[name])
f.close()
@@ -189,7 +190,7 @@
defines = " ".join([
"-D {0}={1}".format(k, v)
- for k, v in defines.iteritems()
+ for k, v in six.iteritems(defines)
])
sources = " ".join(sources)
if output:
@@ -206,7 +207,7 @@
specified, the contents of that file are searched."""
if path is not None:
- output = file(path).read()
+ output = open(path).read()
else:
output = self.output + self.errout
--- a/src/tests/cli/t_pkgrecv.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkgrecv.py Wed Jul 01 16:20:01 2015 -0700
@@ -43,12 +43,12 @@
import subprocess
import tempfile
import time
-import urllib
-import urlparse
import unittest
import zlib
from pkg.digest import DEFAULT_HASH_FUNC
+from six.moves.urllib.parse import urlparse
+from six.moves.urllib.request import url2pathname
try:
import pkg.sha512_t
@@ -169,8 +169,8 @@
@staticmethod
def get_repo(uri):
- parts = urlparse.urlparse(uri, "file", allow_fragments=0)
- path = urllib.url2pathname(parts[2])
+ parts = urlparse(uri, "file", allow_fragments=0)
+ path = url2pathname(parts[2])
try:
return repo.Repository(root=path)
@@ -205,7 +205,7 @@
# Test list newest.
self.pkgrecv(self.durl1, "--newest")
output = self.reduceSpaces(self.output)
-
+
def _nobuild_fmri(pfmri):
return fmri.PkgFmri(pfmri).get_fmri(
include_build=False)
@@ -241,13 +241,13 @@
# Verify that the files aren't compressed since -k wasn't used.
# This is also the format pkgsend will expect for correct
# republishing.
- ofile = file(os.devnull, "rb")
+ ofile = open(os.devnull, "rb")
for atype in ("file", "license"):
for a in m.gen_actions_by_type(atype):
if not hasattr(a, "hash"):
continue
- ifile = file(os.path.join(basedir, a.hash),
+ ifile = open(os.path.join(basedir, a.hash),
"rb")
# Since the file shouldn't be compressed, this
@@ -553,13 +553,13 @@
# This is also the format pkgsend will expect for correct
# republishing.
- ofile = file(os.devnull, "rb")
+ ofile = open(os.devnull, "rb")
for atype in ("file", "license"):
for a in m.gen_actions_by_type(atype):
if not hasattr(a, "hash"):
continue
- ifile = file(os.path.join(basedir, a.hash),
+ ifile = open(os.path.join(basedir, a.hash),
"rb")
# Since the file shouldn't be compressed, this
@@ -870,14 +870,14 @@
# publisher to repo which contains same publisher
self.pkgrecv(self.durl1, "--clone -d {0}".format(self.dpath2))
- ret = subprocess.call(["/usr/bin/gdiff", "-Naur", "-x",
+ ret = subprocess.call(["/usr/bin/gdiff", "-Naur", "-x",
"index", "-x", "trans", self.dpath1, self.dpath2])
self.assertTrue(ret==0)
# Test that packages in dst which are not in src get removed.
self.pkgsend_bulk(self.durl2, (self.amber30))
self.pkgrecv(self.durl1, "--clone -d {0}".format(self.dpath2))
- ret = subprocess.call(["/usr/bin/gdiff", "-Naur", "-x",
+ ret = subprocess.call(["/usr/bin/gdiff", "-Naur", "-x",
"index", "-x", "trans", self.dpath1, self.dpath2])
self.assertTrue(ret==0)
@@ -890,7 +890,7 @@
amber = self.amber10.replace("open ", "open pkg://test2/")
self.pkgsend_bulk(self.durl1, amber)
self.pkgrecv(self.durl1, "--clone -d {0} -p test2".format(self.dpath2))
- ret = subprocess.call(["/usr/bin/gdiff", "-Naur", "-x",
+ ret = subprocess.call(["/usr/bin/gdiff", "-Naur", "-x",
"index", "-x", "trans", self.dpath1,
self.dpath2])
self.assertTrue(ret==0)
@@ -1061,7 +1061,7 @@
#set permissions of tmp/verboten to make it non-readable
self.verboten = os.path.join(self.test_root, "tmp/verboten")
os.system("chmod 600 {0}".format(self.verboten))
-
+
def test_01_basics(self):
"""Test that transfering a package from an https repo to
@@ -1160,23 +1160,23 @@
self.pkgrecv(self.surl, "--key {key} --cert {cert} "
"-d {dst} --dkey {empty} --dcert {dcert} "
"{pkg}".format(**arg_dict), exit=1)
-
- # No permissions to read src certificate
+
+ # No permissions to read src certificate
self.pkgrecv(self.surl, "--key {key} --cert {verboten} "
"-d {dst} --dkey {dkey} --dcert {dcert} "
"{pkg}".format(**arg_dict), su_wrap=True, exit=1)
- # No permissions to read dst certificate
+ # No permissions to read dst certificate
self.pkgrecv(self.surl, "--key {key} --cert {cert} "
"-d {dst} --dkey {dkey} --dcert {verboten} "
"{pkg}".format(**arg_dict), su_wrap=True, exit=1)
- # No permissions to read src key
+ # No permissions to read src key
self.pkgrecv(self.surl, "--key {verboten} --cert {cert} "
"-d {dst} --dkey {dkey} --dcert {dcert} "
"{pkg}".format(**arg_dict), su_wrap=True, exit=1)
- # No permissions to read dst key
+ # No permissions to read dst key
self.pkgrecv(self.surl, "--key {key} --cert {cert} "
"-d {dst} --dkey {verboten} --dcert {dcert} "
"{pkg}".format(**arg_dict), su_wrap=True, exit=1)
--- a/src/tests/cli/t_pkgrepo.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkgrepo.py Wed Jul 01 16:20:01 2015 -0700
@@ -46,8 +46,6 @@
import subprocess
import tempfile
import time
-import urllib
-import urlparse
import unittest
try:
--- a/src/tests/cli/t_pkgsend.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_pkgsend.py Wed Jul 01 16:20:01 2015 -0700
@@ -36,8 +36,9 @@
import stat
import tempfile
import unittest
-import urllib
-import urllib2
+from six.moves import range
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import urlopen, Request, pathname2url
from pkg import misc
from pkg.actions import fromstr
@@ -302,9 +303,9 @@
try:
url = "{0}/{1}/0/{2}".format(dhurl, "add", "/".join((trx_id,
"set")))
- req = urllib2.Request(url=url, headers=headers)
- urllib2.urlopen(req)
- except urllib2.HTTPError as e:
+ req = Request(url=url, headers=headers)
+ urlopen(req)
+ except HTTPError as e:
err_txt = e.read()
self.assert_("The specified Action attribute "
"value" in err_txt)
@@ -421,8 +422,8 @@
dir_2 = os.path.join(rootdir, "dir_2")
os.mkdir(dir_1)
os.mkdir(dir_2)
- file(os.path.join(dir_1, "A"), "wb").close()
- file(os.path.join(dir_2, "B"), "wb").close()
+ open(os.path.join(dir_1, "A"), "wb").close()
+ open(os.path.join(dir_2, "B"), "wb").close()
mfpath = os.path.join(rootdir, "manifest_test")
with open(mfpath, "wb") as mf:
mf.write("""file NOHASH mode=0755 owner=root group=bin path=/A
@@ -695,7 +696,7 @@
if err.errno != os.errno.EEXIST:
raise
fpath = os.path.join(pkgroot, entry)
- f = file(fpath, "wb")
+ f = open(fpath, "wb")
f.write("test" + entry)
f.close()
# compute a digest of the file we just created,
@@ -712,12 +713,12 @@
raise
pkginfopath = os.path.join(pkgroot, "pkginfo")
- pkginfo = file(pkginfopath, "w")
+ pkginfo = open(pkginfopath, "w")
pkginfo.write(pkginfo_contents)
pkginfo.close()
prototypepath = os.path.join(pkgroot, "prototype")
- prototype = file(prototypepath, "w")
+ prototype = open(prototypepath, "w")
prototype.write(prototype_contents)
prototype.close()
@@ -878,7 +879,7 @@
"""Verify that "pkgsend refresh-index" triggers indexing."""
dhurl = self.dc.get_depot_url()
- dfurl = "file://{0}".format(urllib.pathname2url(self.dc.get_repodir()))
+ dfurl = "file://{0}".format(pathname2url(self.dc.get_repodir()))
fd, fpath = tempfile.mkstemp(dir=self.test_root)
@@ -1032,7 +1033,7 @@
self.dc.stop()
rpath = self.dc.get_repodir()
fpath = os.path.join(self.test_root, "manifest")
- f = file(fpath, "w")
+ f = open(fpath, "w")
f.write(pkg_manifest)
f.close()
self.pkgsend("file://{0}".format(rpath),
@@ -1218,8 +1219,8 @@
dir_2 = os.path.join(rootdir, "dir_2")
os.mkdir(dir_1)
os.mkdir(dir_2)
- file(os.path.join(dir_1, "A"), "wb").close()
- file(os.path.join(dir_2, "B"), "wb").close()
+ open(os.path.join(dir_1, "A"), "wb").close()
+ open(os.path.join(dir_2, "B"), "wb").close()
mfpath = os.path.join(rootdir, "manifest_test")
with open(mfpath, "wb") as mf:
mf.write("""file NOHASH mode=0755 owner=root group=bin path=/A
@@ -1255,7 +1256,7 @@
rootdir = self.test_root
dir_1 = os.path.join(rootdir, "dir_1")
os.mkdir(dir_1)
- file(os.path.join(dir_1, "A"), "wb").close()
+ open(os.path.join(dir_1, "A"), "wb").close()
mfpath = os.path.join(rootdir, "manifest_test")
with open(mfpath, "wb") as mf:
mf.write("""file NOHASH mode=0755 owner=root group=bin path=/A
@@ -1449,7 +1450,7 @@
def do_test(*pathnames):
self.debug("=" * 70)
self.debug("Testing: {0}".format(pathnames,))
- for i in xrange(len(pathnames)):
+ for i in range(len(pathnames)):
l = list(pathnames)
p = l.pop(i)
do_test_one(p, l)
@@ -1556,8 +1557,8 @@
rootdir = self.test_root
dir_1 = os.path.join(rootdir, "dir_1")
os.mkdir(dir_1)
- file(os.path.join(dir_1, "A"), "wb").close()
- file(os.path.join(dir_1, "B"), "wb").close()
+ open(os.path.join(dir_1, "A"), "wb").close()
+ open(os.path.join(dir_1, "B"), "wb").close()
mfpath = os.path.join(rootdir, "manifest_test")
with open(mfpath, "wb") as mf:
mf.write("""file NOHASH mode=0755 owner=root group=bin path=/A
@@ -1583,12 +1584,12 @@
# Add the trust anchor needed to verify the server's identity.
self.seed_ta_dir("ta7")
- # Try to publish a simple package to SSL-secured repo
+ # Try to publish a simple package to SSL-secured repo
self.pkgsend(self.url, "publish --key {key} --cert {cert} "
"-d {dir} {mani}".format(**arg_dict))
# Try to publish a simple package to SSL-secured repo without
- # prvoviding certs (should fail).
+                # providing certs (should fail).
self.pkgsend(self.url, "publish -d {dir} {mani}".format(**arg_dict),
exit=1)
@@ -1617,12 +1618,12 @@
self.pkgsend(self.url, "publish --key {empty} "
"--cert {cert} -d {dir} {mani}".format(**arg_dict), exit=1)
- # No permissions to read certificate
+ # No permissions to read certificate
self.pkgsend(self.url, "publish --key {key} "
"--cert {verboten} -d {dir} {mani}".format(**arg_dict),
su_wrap=True, exit=1)
- # No permissions to read key
+ # No permissions to read key
self.pkgsend(self.url, "publish --key {verboten} "
"--cert {cert} -d {dir} {mani}".format(**arg_dict),
su_wrap=True, exit=1)
--- a/src/tests/cli/t_publish_api.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_publish_api.py Wed Jul 01 16:20:01 2015 -0700
@@ -30,12 +30,13 @@
import pkg5unittest
import os
+from six.moves.urllib.parse import urlunparse
+from six.moves.urllib.request import pathname2url
+
import pkg.client.publisher as publisher
import pkg.client.transport.transport as transport
import pkg.fmri as fmri
import pkg.publish.transaction as trans
-import urlparse
-import urllib
class TestPkgPublicationApi(pkg5unittest.SingleDepotTestCase):
"""Various publication tests."""
@@ -75,8 +76,8 @@
location = self.dc.get_repodir()
location = os.path.abspath(location)
- location = urlparse.urlunparse(("file", "",
- urllib.pathname2url(location), "", "", ""))
+ location = urlunparse(("file", "",
+ pathname2url(location), "", "", ""))
repouriobj = publisher.RepositoryURI(location)
repo = publisher.Repository(origins=[repouriobj])
--- a/src/tests/cli/t_sysrepo.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_sysrepo.py Wed Jul 01 16:20:01 2015 -0700
@@ -38,13 +38,15 @@
import pkg.p5p
import shutil
import unittest
-import urllib2
import shutil
import simplejson
import stat
import sys
import time
import StringIO
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.parse import urlparse, unquote
+from six.moves.urllib.request import urlopen
import pkg.misc as misc
import pkg.portable as portable
@@ -414,7 +416,7 @@
# create a version of this url with a symlink, to ensure we
# can follow links in urls
- urlresult = urllib2.urlparse.urlparse(self.rurl1)
+ urlresult = urlparse(self.rurl1)
symlink_path = os.path.join(self.test_root, "repo_symlink")
os.symlink(urlresult.path, symlink_path)
symlinked_url = "file://{0}".format(symlink_path)
@@ -650,8 +652,8 @@
url = "http://localhost:{0}/{1}".format(
self.sysrepo_port, part)
try:
- resp = urllib2.urlopen(url, None, None)
- except urllib2.HTTPError as e:
+ resp = urlopen(url, None, None)
+ except HTTPError as e:
if e.code != code:
self.assert_(False,
"url {0} returned: {1}".format(url, e))
@@ -693,7 +695,7 @@
"file/1/f5da841b7c3601be5629bb8aef928437de7d534e"]:
url = "http://localhost:{0}/test1/{1}/{2}".format(
self.sysrepo_port, p5p_hash, path)
- resp = urllib2.urlopen(url, None, None)
+ resp = urlopen(url, None, None)
self.debug(resp.readlines())
self.sc.stop()
@@ -752,7 +754,7 @@
# treat it as corrupt, and clobber the old cache
rubbish = {"food preference": "I like noodles."}
other = ["nonsense here"]
- with file(full_cache_path, "wb") as cache_file:
+ with open(full_cache_path, "wb") as cache_file:
simplejson.dump((rubbish, other), cache_file)
self.sysrepo("", stderr=True)
self.assert_("Invalid config cache at" in self.errout)
@@ -781,10 +783,10 @@
# it - despite being well-formed, the cache doesn't contain the
# same configuration as the image, simulating an older version
# of pkg(1) having changed publisher configuration.
- with file(full_cache_path, "rb") as cache_file:
+ with open(full_cache_path, "rb") as cache_file:
uri_pub_map, no_uri_pubs = simplejson.load(cache_file)
- with file(full_cache_path, "wb") as cache_file:
+ with open(full_cache_path, "wb") as cache_file:
del uri_pub_map[self.durl1]
simplejson.dump((uri_pub_map, no_uri_pubs), cache_file,
indent=True)
@@ -997,7 +999,7 @@
# lives outside our normal search path
mod_name = "sysrepo_p5p"
src_name = "{0}.py".format(mod_name)
- sysrepo_p5p_file = file(os.path.join(self.sysrepo_template_dir,
+ sysrepo_p5p_file = open(os.path.join(self.sysrepo_template_dir,
src_name))
self.sysrepo_p5p = imp.load_module(mod_name, sysrepo_p5p_file,
src_name, ("py", "r", imp.PY_SOURCE))
@@ -1035,7 +1037,7 @@
for query in queries:
seen_content = False
- environ["QUERY_STRING"] = urllib2.unquote(query)
+ environ["QUERY_STRING"] = unquote(query)
self.http_status = ""
try:
--- a/src/tests/cli/t_util_merge.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/cli/t_util_merge.py Wed Jul 01 16:20:01 2015 -0700
@@ -36,11 +36,11 @@
import pkg.server.repository as repo
import tempfile
import time
-import urllib
-import urlparse
import unittest
import zlib
+from six.moves.urllib.parse import urlparse
+from six.moves.urllib.request import url2pathname
class TestUtilMerge(pkg5unittest.ManyDepotTestCase):
# Cleanup after every test.
@@ -48,24 +48,24 @@
scheme10 = """
open pkg:/[email protected],5.11-0
- close
+ close
"""
tree10 = """
open [email protected],5.11-0
- close
+ close
"""
amber10 = """
open [email protected],5.11-0
add depend fmri=pkg:/[email protected] type=require
- close
+ close
"""
amber20 = """
open [email protected],5.11-0
add depend fmri=pkg:/[email protected] type=require
- close
+ close
"""
bronze10 = """
@@ -96,7 +96,7 @@
add license tmp/copyright3 license=copyright
add file tmp/bronzeA2 mode=0444 owner=root group=bin path=/A1/B2/C3/D4/E5/F6/bronzeA2
add depend fmri=pkg:/[email protected] type=require
- close
+ close
"""
misc_files = [ "tmp/bronzeA1", "tmp/bronzeA2",
@@ -137,8 +137,8 @@
@staticmethod
def get_repo(uri):
- parts = urlparse.urlparse(uri, "file", allow_fragments=0)
- path = urllib.url2pathname(parts[2])
+ parts = urlparse(uri, "file", allow_fragments=0)
+ path = url2pathname(parts[2])
try:
return repo.Repository(root=path)
@@ -177,7 +177,7 @@
exp_lines = ["set name=pkg.fmri value={0}".format(f)]
for dc in self.dcs.values():
repo = dc.get_repo()
- mpath = repo.manifest(f)
+ mpath = repo.manifest(f)
if not os.path.exists(mpath):
# Not in this repository, check next.
continue
--- a/src/tests/perf/membench.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/perf/membench.py Wed Jul 01 16:20:01 2015 -0700
@@ -64,9 +64,9 @@
startusage = misc.__getvmusage()
n = 0
# Generate a good sized series of valid YYYYMMDD strings
- for y in xrange(1, 10000):
- for m in xrange(1, 10):
- for d in xrange(1, 2):
+ for y in range(1, 10000):
+ for m in range(1, 10):
+ for d in range(1, 2):
n += 1
collection.append(func(int("{0:0=4d}{1:0=2d}{2:0=2d}".format(y, m, d))))
endusage = misc.__getvmusage()
--- a/src/tests/pkg5unittest.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/tests/pkg5unittest.py Wed Jul 01 16:20:01 2015 -0700
@@ -34,13 +34,11 @@
from __future__ import print_function
import baseline
-import ConfigParser
import copy
import difflib
import errno
import gettext
import hashlib
-import httplib
import logging
import multiprocessing
import os
@@ -48,6 +46,7 @@
import shutil
import signal
import simplejson as json
+import six
import stat
import subprocess
import sys
@@ -55,8 +54,6 @@
import time
import traceback
import unittest
-import urllib2
-import urlparse
import operator
import platform
import pty
@@ -69,6 +66,13 @@
import traceback
import types
+from imp import reload
+from six.moves import configparser, http_client
+from six.moves.urllib.error import HTTPError, URLError
+from six.moves.urllib.parse import urljoin
+from six.moves.urllib.request import urlopen
+from socket import error as socketerror
+
import pkg.client.api_errors as apx
import pkg.misc as misc
import pkg.client.publisher as publisher
@@ -76,9 +80,7 @@
import pkg.server.repository as sr
import M2Crypto as m2
-from imp import reload
from pkg.client.debugvalues import DebugValues
-from socket import error as socketerror
EmptyI = tuple()
EmptyDict = dict()
@@ -520,7 +522,7 @@
ins = " [+{0:d} lines...]".format(len(lines) - 1)
else:
ins = ""
- if isinstance(lines[0], unicode):
+ if isinstance(lines[0], six.text_type):
lines[0] = lines[0].encode("utf-8")
self.debugcmd(
"echo '{0}{1}' > {2}".format(lines[0], ins, path))
@@ -853,7 +855,7 @@
os.makedirs(os.path.dirname(path), 0o777)
self.debugfilecreate(content, path)
fh = open(path, 'wb')
- if isinstance(content, unicode):
+ if isinstance(content, six.text_type):
content = content.encode("utf-8")
fh.write(content)
fh.close()
@@ -872,7 +874,7 @@
# a list, simply turn it into a dict where each file's
# contents is its own name, so that we get some uniqueness.
#
- if isinstance(files, basestring):
+ if isinstance(files, six.string_types):
files = [files]
if isinstance(files, list):
@@ -942,9 +944,9 @@
msg=""):
"""Compare two strings."""
- if not isinstance(expected, basestring):
+ if not isinstance(expected, six.string_types):
expected = pprint.pformat(expected)
- if not isinstance(actual, basestring):
+ if not isinstance(actual, six.string_types):
actual = pprint.pformat(actual)
expected_lines = expected.splitlines()
@@ -987,7 +989,7 @@
"""Check that the parsable output in 'output' is what is
expected."""
- if isinstance(output, basestring):
+ if isinstance(output, six.string_types):
try:
outd = json.loads(output)
except Exception as e:
@@ -1052,10 +1054,10 @@
to /.
"""
- new_rcfile = file("{0}/{1}{2}".format(test_root, os.path.basename(rcfile),
+ new_rcfile = open("{0}/{1}{2}".format(test_root, os.path.basename(rcfile),
suffix), "w")
- conf = ConfigParser.RawConfigParser()
+ conf = configparser.RawConfigParser()
conf.readfp(open(rcfile))
for key in config:
@@ -1560,7 +1562,7 @@
# Pull in the information stored in places other than
# the _Pkg5TestResult that we need to send back to the
# master process.
- otw.timing = test_suite.timing.items()
+ otw.timing = list(test_suite.timing.items())
otw.text = buf.getvalue()
otw.baseline_failures = b.getfailures()
if g_debug_output:
@@ -3163,7 +3165,7 @@
dc.set_port(port)
for section in properties:
- for prop, val in properties[section].iteritems():
+ for prop, val in six.iteritems(properties[section]):
dc.set_property(section, prop, val)
if refresh_index:
dc.set_refresh_index()
@@ -3395,7 +3397,7 @@
file_path = os.path.join(self.get_img_path(), path)
try:
- f = file(file_path)
+ f = open(file_path)
except:
self.assert_(False,
"File {0} does not exist or contain {1}".format(
@@ -3417,7 +3419,7 @@
image."""
file_path = os.path.join(self.get_img_path(), path)
- f = file(file_path)
+ f = open(file_path)
for line in f:
if string in line:
f.close()
@@ -3434,7 +3436,7 @@
f.write("\n{0}\n".format(string))
def seed_ta_dir(self, certs, dest_dir=None):
- if isinstance(certs, basestring):
+ if isinstance(certs, six.string_types):
certs = [certs]
if not dest_dir:
dest_dir = self.ta_dir
@@ -3618,7 +3620,7 @@
"""If we only use a single ApacheController, self.ac will
return that controller, otherwise we return None."""
if self.acs and len(self.acs) == 1:
- return self.acs[self.acs.keys()[0]]
+ return self.acs[list(self.acs.keys())[0]]
else:
return None
@@ -3688,7 +3690,7 @@
*args, **kwargs)
def seed_ta_dir(self, certs, dest_dir=None):
- if isinstance(certs, basestring):
+ if isinstance(certs, six.string_types):
certs = [certs]
if not dest_dir:
dest_dir = self.ta_dir
@@ -4385,12 +4387,12 @@
def _network_ping(self):
try:
- urllib2.urlopen(self.__url)
- except urllib2.HTTPError as e:
- if e.code == httplib.FORBIDDEN:
+ urlopen(self.__url)
+ except HTTPError as e:
+ if e.code == http_client.FORBIDDEN:
return True
return False
- except urllib2.URLError as e:
+ except URLError as e:
if isinstance(e.reason, ssl.SSLError):
return True
return False
@@ -4565,12 +4567,12 @@
def _network_ping(self):
try:
- urllib2.urlopen(urlparse.urljoin(self.url, "syspub/0"))
- except urllib2.HTTPError as e:
- if e.code == httplib.FORBIDDEN:
+ urlopen(urljoin(self.url, "syspub/0"))
+ except HTTPError as e:
+ if e.code == http_client.FORBIDDEN:
return True
return False
- except urllib2.URLError:
+ except URLError:
return False
return True
@@ -4586,16 +4588,16 @@
try:
# Ping the versions URL, rather than the default /
# so that we don't initialize the BUI code yet.
- repourl = urlparse.urljoin(self.url, "versions/0")
+ repourl = urljoin(self.url, "versions/0")
# Disable SSL peer verification, we just want to check
# if the depot is running.
- urllib2.urlopen(repourl,
+ urlopen(repourl,
context=ssl._create_unverified_context())
- except urllib2.HTTPError as e:
- if e.code == httplib.FORBIDDEN:
+ except HTTPError as e:
+ if e.code == http_client.FORBIDDEN:
return True
return False
- except urllib2.URLError:
+ except URLError:
return False
return True
--- a/src/util/apache2/depot/depot.conf.mako Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/apache2/depot/depot.conf.mako Wed Jul 01 16:20:01 2015 -0700
@@ -28,9 +28,7 @@
# dropped into an Apache conf.d directory, or it can be referenced from
# a more complete httpd.conf template via an include directive.
#
-</%doc><%
-import urllib
-%>
+</%doc>
RewriteEngine on
RewriteLog "${log_dir}/rewrite.log"
RewriteLogLevel 0
--- a/src/util/apache2/depot/depot_httpd.conf.mako Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/apache2/depot/depot_httpd.conf.mako Wed Jul 01 16:20:01 2015 -0700
@@ -29,7 +29,6 @@
</%doc>
<%
import os.path
- import urllib
context.write("""
#
# This is an automatically generated file for IPS repositories, and
--- a/src/util/apache2/depot/depot_index.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/apache2/depot/depot_index.py Wed Jul 01 16:20:01 2015 -0700
@@ -24,7 +24,6 @@
from __future__ import print_function
import atexit
import cherrypy
-import httplib
import logging
import mako
import os
@@ -35,8 +34,10 @@
import threading
import time
import traceback
-import urllib
-import Queue
+
+from six.moves import http_client, queue
+from six.moves.urllib.parse import quote
+from six.moves.urllib.request import urlopen
import pkg.p5i
import pkg.server.api
@@ -77,7 +78,7 @@
def __init__(self, request, message):
self.request = request
self.message = message
- self.http_status = httplib.INTERNAL_SERVER_ERROR
+ self.http_status = http_client.INTERNAL_SERVER_ERROR
def __str__(self):
return "{0}: {1}".format(self.message, self.request)
@@ -89,7 +90,7 @@
def __init__(self, request):
self.request = request
- self.http_status = httplib.FORBIDDEN
+ self.http_status = http_client.FORBIDDEN
def __str__(self):
return "admin/0 operations are disabled. " \
@@ -104,7 +105,7 @@
def __init__(self, request, cmd):
self.request = request
self.cmd = cmd
- self.http_status = httplib.NOT_IMPLEMENTED
+ self.http_status = http_client.NOT_IMPLEMENTED
def __str__(self):
return "admin/0 operations of type {type} are not " \
@@ -119,7 +120,7 @@
def __init__(self, request):
self.request = request
- self.http_status = httplib.FORBIDDEN
+ self.http_status = http_client.FORBIDDEN
def __str__(self):
return "admin/0 operations to refresh indexes are not " \
@@ -134,7 +135,7 @@
def __init__(self, size=10, busy_url=None):
self.size = size
- self.__q = Queue.Queue(self.size)
+ self.__q = queue.Queue(self.size)
self.__thread = None
self.__running = False
self.__keep_busy_thread = None
@@ -155,7 +156,7 @@
isn't full.
"""
if self.__q.unfinished_tasks > self.size - 1:
- raise Queue.Full()
+ raise queue.Full()
self.__q.put_nowait((task, args, kwargs))
self.__keep_busy = True
@@ -170,7 +171,7 @@
# for a new task to appear.
task, args, kwargs = \
self.__q.get(timeout=.5)
- except Queue.Empty:
+ except queue.Empty:
continue
task(*args, **kwargs)
if hasattr(self.__q, "task_done"):
@@ -190,7 +191,7 @@
time.sleep(KEEP_BUSY_INTERVAL)
if self.__keep_busy:
try:
- urllib.urlopen(self.__busy_url).close()
+ urlopen(self.__busy_url).close()
except Exception as e:
print("Failure encountered retrieving "
"busy url {0}: {1}".format(
@@ -376,7 +377,7 @@
# despite the fact that we're not serving content for any one
# repository. For the purposes of rendering this page, we'll
# use the first object we come across.
- depot = depot_buis[depot_buis.keys()[0]]
+ depot = depot_buis[list(depot_buis.keys())[0]]
accept_lang = self.get_accept_lang(cherrypy.request, depot)
cherrypy.request.path_info = "/{0}".format(accept_lang)
tlookup = mako.lookup.TemplateLookup(
@@ -430,7 +431,7 @@
# When serving theme resources we just choose the first
# repository we find, which is fine since we're serving
# content that's generic to all repositories, so we
- repo_prefix = repositories.keys()[0]
+ repo_prefix = list(repositories.keys())[0]
repo = repositories[repo_prefix]
depot_bui = depot_buis[repo_prefix]
# use our custom request_pub_func, since theme resources
@@ -502,7 +503,7 @@
pub_mf = "/".join(redir[0:4])
pkg_name = "/".join(redir[4:])
# encode the URI so our RewriteRules can process them
- pkg_name = urllib.quote(pkg_name)
+ pkg_name = quote(pkg_name)
pkg_name = pkg_name.replace("/", "%2F")
pkg_name = pkg_name.replace("%40", "@", 1)
@@ -645,7 +646,7 @@
try:
self.bgtask.put(repo.refresh_index,
pub=pub_prefix)
- except Queue.Full as e:
+ except queue.Full as e:
retries = 10
success = False
while retries > 0 and not success:
@@ -659,7 +660,7 @@
pass
if not success:
raise cherrypy.HTTPError(
- status=httplib.SERVICE_UNAVAILABLE,
+ status=http_client.SERVICE_UNAVAILABLE,
message="Unable to refresh the "
"index for {0} after repeated "
"retries. Try again later.".format(
@@ -697,7 +698,7 @@
self.config = {}
@staticmethod
- def default_error_page(status=httplib.NOT_FOUND, message="oops",
+ def default_error_page(status=http_client.NOT_FOUND, message="oops",
traceback=None, version=None):
"""This function is registered as the default error page
for CherryPy errors. This sets the response headers to
@@ -711,14 +712,14 @@
# Server errors are interesting, so let's log them. In the case
# of an internal server error, we send a 404 to the client. but
# log the full details in the server log.
- if (status == httplib.INTERNAL_SERVER_ERROR or
+ if (status == http_client.INTERNAL_SERVER_ERROR or
status.startswith("500 ")):
# Convert the error to a 404 to obscure implementation
# from the client, but log the original error to the
# server logs.
error = cherrypy._cperror._HTTPErrorTemplate % \
- {"status": httplib.NOT_FOUND,
- "message": httplib.responses[httplib.NOT_FOUND],
+ {"status": http_client.NOT_FOUND,
+ "message": http_client.responses[http_client.NOT_FOUND],
"traceback": "",
"version": cherrypy.__version__}
print("Path that raised exception was {0}".format(
@@ -727,7 +728,7 @@
return error
else:
error = cherrypy._cperror._HTTPErrorTemplate % \
- {"status": httplib.NOT_FOUND, "message": message,
+ {"status": http_client.NOT_FOUND, "message": message,
"traceback": "", "version": cherrypy.__version__}
return error
@@ -796,7 +797,7 @@
# converted and logged by our error handler
# before the client sees it.
raise cherrypy.HTTPError(
- status=httplib.INTERNAL_SERVER_ERROR,
+ status=http_client.INTERNAL_SERVER_ERROR,
message="".join(traceback.format_exc(e)))
wsgi_depot = WsgiDepot()
--- a/src/util/apache2/sysrepo/sysrepo_p5p.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/apache2/sysrepo/sysrepo_p5p.py Wed Jul 01 16:20:01 2015 -0700
@@ -24,24 +24,25 @@
from __future__ import print_function
import pkg.p5p
-import httplib
import os
import shutil
import simplejson
+import six
import sys
import threading
import traceback
+from six.moves import http_client
# redirecting stdout for proper WSGI portability
sys.stdout = sys.stderr
-SERVER_OK_STATUS = "{0} {1}".format(httplib.OK, httplib.responses[httplib.OK])
-SERVER_ERROR_STATUS = "{0} {1}".format(httplib.INTERNAL_SERVER_ERROR,
- httplib.responses[httplib.INTERNAL_SERVER_ERROR])
-SERVER_NOTFOUND_STATUS = "{0} {1}".format(httplib.NOT_FOUND,
- httplib.responses[httplib.NOT_FOUND])
-SERVER_BADREQUEST_STATUS = "{0} {1}".format(httplib.BAD_REQUEST,
- httplib.responses[httplib.BAD_REQUEST])
+SERVER_OK_STATUS = "{0} {1}".format(http_client.OK, http_client.responses[http_client.OK])
+SERVER_ERROR_STATUS = "{0} {1}".format(http_client.INTERNAL_SERVER_ERROR,
+ http_client.responses[http_client.INTERNAL_SERVER_ERROR])
+SERVER_NOTFOUND_STATUS = "{0} {1}".format(http_client.NOT_FOUND,
+ http_client.responses[http_client.NOT_FOUND])
+SERVER_BADREQUEST_STATUS = "{0} {1}".format(http_client.BAD_REQUEST,
+ http_client.responses[http_client.BAD_REQUEST])
response_headers = [("content-type", "application/binary")]
@@ -417,7 +418,7 @@
if __name__ == "__main__":
"""A simple main function to allows us to test any given query/env"""
- import urllib
+ from six.moves.urllib.parse import unquote
def start_response(status, response_headers, exc_info=None):
"""A dummy response function."""
@@ -439,14 +440,14 @@
# unquote the url, so that we can easily copy/paste entries from
# Apache logs when testing.
- environ["QUERY_STRING"] = urllib.unquote(sys.argv[1])
+ environ["QUERY_STRING"] = unquote(sys.argv[1])
environ["SYSREPO_RUNTIME_DIR"] = os.environ["PWD"]
environ["PKG5_TEST_ENV"] = "True"
hsh, path = sys.argv[2].split("=")
environ[hsh] = path
for response in application(environ, start_response):
- if isinstance(response, basestring):
+ if isinstance(response, six.string_types):
print(response.rstrip())
elif response:
for line in response.readlines():
--- a/src/util/log-scripts/an2_ip_active.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/log-scripts/an2_ip_active.py Wed Jul 01 16:20:01 2015 -0700
@@ -25,7 +25,7 @@
#
from __future__ import print_function
-import cPickle as pickle
+import six.moves.cPickle as pickle
import datetime
import time
@@ -83,7 +83,7 @@
merge_entries_by_date = {}
for fn in sys.argv[1:]:
- f = file(fn, "rb")
+ f = open(fn, "rb")
ebd = pickle.load(f)
f.close()
--- a/src/util/log-scripts/an_filelist.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/log-scripts/an_filelist.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,7 +33,6 @@
import re
import sys
import time
-import urllib
from an_report import *
--- a/src/util/log-scripts/an_ip_active.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/log-scripts/an_ip_active.py Wed Jul 01 16:20:01 2015 -0700
@@ -24,7 +24,7 @@
# Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
#
-import cPickle as pickle
+import six.moves.cPickle as pickle
import datetime
import fileinput
import GeoIP
@@ -34,7 +34,6 @@
import re
import sys
import time
-import urllib
from an_report import *
@@ -96,7 +95,7 @@
continue
mg = m.groupdict()
-
+
d = None
if lastdatetime and mg["date"] == lastdate:
--- a/src/util/log-scripts/an_manifest.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/log-scripts/an_manifest.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,9 +33,9 @@
import re
import sys
import time
-import urllib
from an_report import *
+from six.moves.urllib.parse import unquote
after = None
before = None
@@ -85,14 +85,14 @@
pg = pm.groupdict()
try:
- manifest_by_pkg[urllib.unquote(pg["stem"])] += 1
+ manifest_by_pkg[unquote(pg["stem"])] += 1
except KeyError:
- manifest_by_pkg[urllib.unquote(pg["stem"])] = 1
+ manifest_by_pkg[unquote(pg["stem"])] = 1
try:
- manifest_by_ver_pkg[urllib.unquote(pg["stem"] + "@" + pg["version"])] += 1
+ manifest_by_ver_pkg[unquote(pg["stem"] + "@" + pg["version"])] += 1
except KeyError:
- manifest_by_ver_pkg[urllib.unquote(pg["stem"] + "@" + pg["version"])] = 1
+ manifest_by_ver_pkg[unquote(pg["stem"] + "@" + pg["version"])] = 1
agent = pkg_agent_pat.search(mg["agent"])
if agent == None:
--- a/src/util/log-scripts/an_report.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/log-scripts/an_report.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,9 +33,10 @@
import re
import socket
import sys
-import urllib2
import config
+from six.moves.urllib.request import urlopen
+
# Apache combined log pattern
comb_log_pat = re.compile("(?P<ip>[\d\.]*) - - \[(?P<date>[^:]*):(?P<time>\S*) (?P<tz>[^\]]*)\] \"(?P<op>GET|POST|HEAD|\S*) (?P<uri>\S*) HTTP/(?P<httpver>[^\"]*)\" (?P<response>\d*) (?P<subcode>\d*|-) \"(?P<refer>[^\"]*)\" \"(?P<agent>[^\"]*)\" \"(?P<uuid>[^\"]*)\" \"(?P<intent>[^\"]*)\"")
@@ -62,7 +63,7 @@
host_cache = {}
host_props["outstanding"] = 0
-def host_cache_save():
+def host_cache_save():
pklfile = open(host_props["file_name"], 'wb')
pickle.dump(host_cache, pklfile)
pklfile.close()
@@ -111,7 +112,7 @@
def retrieve_chart(url, fileprefix):
f = open("{0}.png".format(fileprefix), "w")
try:
- u = urllib2.urlopen(url)
+ u = urlopen(url)
f.write(u.read())
except:
print("an_catalog: couldn't retrieve chart '{0}'".format(url),
@@ -146,7 +147,7 @@
</body>
</html>""")
-
+
def report_section_begin(cap_title, summary_file = None):
msg = """\
<br clear="all" />
@@ -211,7 +212,7 @@
days += 1
total += data[i]
- print(i, data[i], file=rf)
+ print(i, data[i], file=rf)
if chart_data == "":
chart_data = "{0:d}".format(data[i])
else:
@@ -239,7 +240,7 @@
<img src=\"{1}\" alt=\"{2}\" /><br />""".format(url, fname, title)
rf.close()
-
+
print(msg)
if summary_file:
print(msg, file=summary_file)
@@ -313,7 +314,7 @@
sel = ""
msg += """\
- </ul>
+ </ul>
<div class="yui-content">"""
for r in map_regions:
@@ -327,7 +328,7 @@
</div>
</div>
<small>Color intensity linear in log of requests.</small>"""
-
+
print(msg)
if summary_file:
--- a/src/util/log-scripts/an_search.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/log-scripts/an_search.py Wed Jul 01 16:20:01 2015 -0700
@@ -33,9 +33,9 @@
import re
import sys
import time
-import urllib
from an_report import *
+from six.moves.urllib.parse import unquote
after = None
before = None
@@ -121,7 +121,7 @@
if pm != None:
pg = pm.groupdict()
- kw = urllib.unquote(pg["keywords"])
+ kw = unquote(pg["keywords"])
if mg["response"] == "200":
if mg["subcode"] == "-":
@@ -146,7 +146,7 @@
search_by_failure[kw] = 1
# XXX should measure downtime via 503, other failure responses
-
+
agent = pkg_agent_pat.search(mg["agent"])
if agent == None:
@@ -189,7 +189,7 @@
continue
mg = m.groupdict()
-
+
d = None
if lastdatetime and mg["date"] == lastdate:
@@ -217,7 +217,7 @@
report_by_country(search_by_country, "search", summary_file = summary_file)
report_col_end("r", summary_file = summary_file)
report_cols_end(summary_file = summary_file)
-
+
report_cols_begin(summary_file = summary_file)
report_col_begin("l", summary_file = summary_file)
report_search_by_failure()
--- a/src/util/log-scripts/config.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/log-scripts/config.py Wed Jul 01 16:20:01 2015 -0700
@@ -20,16 +20,17 @@
# CDDL HEADER END
#
-# Copyright 2008 Sun Microsystems, Inc. All rights reserved.
-# Use is subject to license terms.
+#
+# Copyright (c) 2008, 2015, Oracle and/or its affiliates. All rights reserved.
+#
import os
-import ConfigParser
+from six.moves import configparser
CFGFILE="site-config"
def get(option, default=None):
- cfg = ConfigParser.ConfigParser()
+ cfg = configparser.ConfigParser()
cfg.read(CFGFILE)
value = cfg.get("default", option)
if not value:
--- a/src/util/log-scripts/log.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/log-scripts/log.py Wed Jul 01 16:20:01 2015 -0700
@@ -107,7 +107,7 @@
assert not fname == None
- lg = file(fname)
+ lg = open(fname)
for l in lg.readlines():
process(l)
--- a/src/util/publish/pkgdiff.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/publish/pkgdiff.py Wed Jul 01 16:20:01 2015 -0700
@@ -38,6 +38,7 @@
import pkg.misc as misc
from pkg.misc import PipeError
from collections import defaultdict
+from itertools import product
def usage(errmsg="", exitcode=2):
"""Emit a usage message and optionally prefix it with a more specific
@@ -311,8 +312,8 @@
s.append(" + {0}".format(new.hash))
attrdiffs = (set(new.differences(old)) -
ignoreattrs)
- attrsames = sorted( list(set(old.attrs.keys() +
- new.attrs.keys()) -
+ attrsames = sorted( list(set(list(old.attrs.keys()) +
+ list(new.attrs.keys())) -
set(new.differences(old))))
else:
if hasattr(old, "hash") and "hash" in onlyattrs:
@@ -321,8 +322,8 @@
s.append(" + {0}".format(new.hash))
attrdiffs = (set(new.differences(old)) &
onlyattrs)
- attrsames = sorted(list(set(old.attrs.keys() +
- new.attrs.keys()) -
+ attrsames = sorted(list(set(list(old.attrs.keys()) +
+ list(new.attrs.keys())) -
set(new.differences(old))))
for a in sorted(attrdiffs):
@@ -356,17 +357,6 @@
return int(different)
-def product(*args, **kwds):
- # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
- # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
- # from python 2.7 itertools
- pools = map(tuple, args) * kwds.get('repeat', 1)
- result = [[]]
- for pool in pools:
- result = [x+[y] for x in result for y in pool]
- for prod in result:
- yield tuple(prod)
-
if __name__ == "__main__":
try:
exit_code = main_func()
--- a/src/util/publish/pkgfmt.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/publish/pkgfmt.py Wed Jul 01 16:20:01 2015 -0700
@@ -24,6 +24,8 @@
#
from __future__ import print_function
+import six
+from functools import cmp_to_key
# Prefixes should be ordered alphabetically with most specific first.
DRIVER_ALIAS_PREFIXES = (
@@ -446,7 +447,7 @@
rem_count = total_count
# Now build the action output string an attribute at a time.
- for k, v in sorted(sattrs.iteritems(), cmp=cmpkv):
+ for k, v in sorted(six.iteritems(sattrs), key=cmp_to_key(cmpkv)):
# Newline breaks are only forced when there is more than
# one value for an attribute.
if not (isinstance(v, list) or isinstance(v, set)):
--- a/src/util/publish/pkglint.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/publish/pkglint.py Wed Jul 01 16:20:01 2015 -0700
@@ -19,13 +19,14 @@
#
# CDDL HEADER END
#
-
+
#
# Copyright (c) 2010, 2015, Oracle and/or its affiliates. All rights reserved.
#
import codecs
import logging
+import six
import sys
import gettext
import locale
@@ -62,7 +63,7 @@
codeset=locale.getpreferredencoding())
global logger
-
+
usage = \
_("\n"
" %prog [-b build_no] [-c cache_dir] [-f file]\n"
@@ -178,15 +179,21 @@
if "pkglint_desc" in method.__dict__ and not verbose:
return method.pkglint_desc
else:
- return "{0}.{1}.{2}".format(method.im_class.__module__,
- method.im_class.__name__,
- method.im_func.func_name)
+ if six.PY3:
+ return "{0}.{1}.{2}".format(method.__self__.__class__.__module__,
+ method.__self__.__class__.__name__,
+ method.__func__.__name__)
+ else:
+ return "{0}.{1}.{2}".format(method.im_class.__module__,
+ method.im_class.__name__,
+ method.im_func.func_name)
+
def emit(name, value):
msg("{0} {1}".format(name.ljust(width), value))
def print_list(items):
- k = items.keys()
+ k = list(items.keys())
k.sort()
for lint_id in k:
emit(lint_id, items[lint_id])
--- a/src/util/publish/pkgmerge.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/publish/pkgmerge.py Wed Jul 01 16:20:01 2015 -0700
@@ -34,10 +34,10 @@
import locale
import os
import shutil
+ import six
import sys
import tempfile
import traceback
- import urllib
import pkg.actions as actions
import pkg.fmri
@@ -51,6 +51,7 @@
from functools import reduce
from pkg.misc import PipeError, emsg, msg
+ from six.moves.urllib.parse import quote
except KeyboardInterrupt:
import sys
sys.exit(1)
@@ -58,12 +59,8 @@
class PkgmergeException(Exception):
"""An exception raised if something goes wrong during the merging
process."""
+ pass
- def __unicode__(self):
- # To workaround python issues 6108 and 2517, this provides a
- # a standard wrapper for this class' exceptions so that they
- # have a chance of being stringified correctly.
- return str(self)
catalog_dict = {} # hash table of catalogs by source uri
fmri_cache = {}
@@ -180,7 +177,7 @@
pub.repository = pub.repository
def get_all_pkg_names(repouri):
- return catalog_dict[repouri.uri].keys()
+ return list(catalog_dict[repouri.uri].keys())
def get_manifest(repouri, fmri):
"""Fetch the manifest for package-fmri 'fmri' from the source
@@ -255,7 +252,7 @@
variants = set()
vcombos = collections.defaultdict(set)
for src_vars in variant_list:
- for v, vval in src_vars.iteritems():
+ for v, vval in six.iteritems(src_vars):
variants.add(v)
vcombos[v].add((v, vval))
@@ -488,7 +485,7 @@
open_time = pfmri.get_timestamp()
- return "{0:d}_{0}".format(
+ return "{0:d}_{1}".format(
calendar.timegm(open_time.utctimetuple()),
- urllib.quote(str(pfmri), ""))
+ quote(str(pfmri), ""))
for entry in processdict:
man, retrievals = merge_fmris(source_list,
@@ -840,7 +837,7 @@
include_dict[pkg_name] -= exclude_dict[pkg_name]
return dict((k, sorted(list(v), reverse=True)[0])
- for k,v in include_dict.iteritems()
+ for k,v in six.iteritems(include_dict)
if v), include_misses
def match_user_fmris(patterns, cat):
--- a/src/util/publish/pkgmogrify.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/publish/pkgmogrify.py Wed Jul 01 16:20:01 2015 -0700
@@ -30,6 +30,7 @@
import os
import re
import shlex
+import six
import sys
import traceback
import warnings
@@ -239,7 +240,7 @@
# It's now appropriate to compile the regexp, if there
# are substitutions to be made. So do the substitution
# and compile the result.
- if isinstance(regexp, basestring):
+ if isinstance(regexp, six.string_types):
rx = re.compile(substitute_values(regexp,
action, matches, pkg_attrs, filename, lineno))
else:
@@ -433,7 +434,7 @@
if not d["quote"]:
q = lambda x: x
- if isinstance(attr, basestring):
+ if isinstance(attr, six.string_types):
newmsg += msg[prevend:i.start()] + \
d.get("prefix", "") + q(attr) + d.get("suffix", "")
else:
@@ -518,7 +519,7 @@
s = transform[11:transform.index("->")]
# Map each pattern to its position in the original match string.
matchorder = {}
- for attr, match in attrdict.iteritems():
+ for attr, match in six.iteritems(attrdict):
# Attributes might be quoted even if they don't need it,
# and lead to a mis-match. These three patterns are all
# safe to try. If we fail to find the match expression,
@@ -551,18 +552,18 @@
action = action[1]
if verbose:
if not action or \
- not isinstance(action, basestring) and \
+ not isinstance(action, six.string_types) and \
orig_attrs != action.attrs:
comments.append("# Applied: {0} (file {1} line {2})".format(
transform, filename, lineno))
comments.append("# Result: {0}".format(action))
- if not action or isinstance(action, basestring):
+ if not action or isinstance(action, six.string_types):
break
# Any newly-created actions need to have the transforms applied, too.
newnewactions = []
for act in newactions:
- if not isinstance(act, basestring):
+ if not isinstance(act, six.string_types):
c, al = apply_transforms(act, pkg_attrs, verbose,
act_filename, act_lineno)
if c:
@@ -586,7 +587,7 @@
if filename.startswith("/") or try_cwd == True and \
os.path.exists(filename):
try:
- return filename, file(filename)
+ return filename, open(filename)
except IOError as e:
raise RuntimeError(_("Cannot open file: {0}").format(e))
@@ -594,7 +595,7 @@
f = os.path.join(i, filename)
if os.path.exists(f):
try:
- return f, file(f)
+ return f, open(f)
except IOError as e:
raise RuntimeError(_("Cannot open file: {0}").format(e))
@@ -764,7 +765,7 @@
if act.name == "set":
name = act.attrs["name"]
value = act.attrs["value"]
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
pkg_attrs.setdefault(name, []).append(value)
else:
pkg_attrs.setdefault(name, []).extend(value)
@@ -778,7 +779,7 @@
if printfilename == None:
printfile = sys.stdout
else:
- printfile = file(printfilename, "w")
+ printfile = open(printfilename, "w")
for p in printinfo:
print("{0}".format(p), file=printfile)
@@ -790,7 +791,7 @@
if outfilename == None:
outfile = sys.stdout
else:
- outfile = file(outfilename, "w")
+ outfile = open(outfilename, "w")
emitted = set()
for comment, actionlist, prepended_macro in output:
--- a/src/util/publish/pkgsurf.py Tue Jun 30 11:44:33 2015 -0700
+++ b/src/util/publish/pkgsurf.py Wed Jul 01 16:20:01 2015 -0700
@@ -71,6 +71,7 @@
import locale
import os
import shutil
+import six
import sys
import tempfile
import traceback
@@ -238,7 +239,7 @@
msg += "\n\t".join(unmatched)
abort(msg)
- return matching.keys()
+ return list(matching.keys())
def get_manifest(repo, pub, pfmri):
""" Retrieve a manifest with FMRI 'pfmri' of publisher 'pub' from
@@ -388,7 +389,7 @@
if a.name == "depend":
# TODO: support dependency lists
# For now, treat as content change.
- if not isinstance(a.attrs["fmri"], basestring):
+ if not isinstance(a.attrs["fmri"], six.string_types):
return False
dpfmri = fmri.PkgFmri(a.attrs["fmri"])
deps.add(dpfmri.get_pkg_stem())
--- a/src/web/config.shtml Tue Jun 30 11:44:33 2015 -0700
+++ b/src/web/config.shtml Wed Jul 01 16:20:01 2015 -0700
@@ -22,8 +22,8 @@
## Copyright 2008, 2010 Oracle and/or its affiliates. All rights reserved.
##
<%!
- import urlparse
import re
+ from six.moves.urllib.parse import urlparse
%>\
<%page args="g_vars"/>\
## Please note that the opensolaris.org and sun.com themes can not be used
@@ -44,7 +44,7 @@
# If a theme was not specified, attempt to determine which theme
# to use based on an absolute URL to the current server.
scheme, netloc, path, params, query, fragment = \
- urlparse.urlparse(request.url(), allow_fragments=0)
+ urlparse(request.url(), allow_fragments=0)
match = re.match("(?:.*[.])?(opensolaris.(?:com|org)|sun.com|oracle.com)", netloc)
if match:
--- a/src/web/en/advanced_search.shtml Tue Jun 30 11:44:33 2015 -0700
+++ b/src/web/en/advanced_search.shtml Wed Jul 01 16:20:01 2015 -0700
@@ -23,7 +23,7 @@
##
<%!
import pkg.query_parser as qp
- import urllib
+ from six.moves.urllib.parse import unquote, quote
%>\
<%inherit file="search.shtml"/>\
<%page args="g_vars"/>\
@@ -45,7 +45,7 @@
versions.insert(0, ("", "", "All"))
# The string value representing the user's selection.
- selected_val = urllib.unquote(request.params.get("v", ""))
+ selected_val = unquote(request.params.get("v", ""))
# The version object matching the string value of the user's selection.
selected_ver = None
@@ -182,7 +182,7 @@
attrs = 'selected="selected" '
context.write("""<option {0}value="{1}">{2}</option>""".format(
- attrs, urllib.quote(val, ""), label))
+ attrs, quote(val, ""), label))
%>\
</select>
</td>
--- a/src/web/en/catalog.shtml Tue Jun 30 11:44:33 2015 -0700
+++ b/src/web/en/catalog.shtml Wed Jul 01 16:20:01 2015 -0700
@@ -23,7 +23,7 @@
##
<%!
import pkg.fmri
- import urllib
+ from six.moves.urllib.parse import quote
%>\
<%inherit file="layout.shtml"/>\
<%page args="g_vars"/>\
@@ -231,11 +231,11 @@
# Start FMRI entry
phref = self.shared.rpath(g_vars, "info/0/{0}".format(
- urllib.quote(str(pfmri), "")))
+ quote(str(pfmri), "")))
# XXX the .p5i extension is a bogus hack because
# packagemanager requires it and shouldn't.
p5ihref = self.shared.rpath(g_vars, "p5i/0/{0}.p5i".format(
- urllib.quote(pfmri.pkg_name, "")))
+ quote(pfmri.pkg_name, "")))
mhref = self.shared.rpath(g_vars,
"manifest/0/{0}".format(pfmri.get_url_path()))
%>
--- a/src/web/en/search.shtml Tue Jun 30 11:44:33 2015 -0700
+++ b/src/web/en/search.shtml Wed Jul 01 16:20:01 2015 -0700
@@ -22,15 +22,19 @@
## Copyright (c) 2010, 2015, Oracle and/or its affiliates. All rights reserved.
##
<%!
- import cgi
+ import six
+ # cgi.escape is deprecated since Python 3.2; use html.escape instead.
+ if six.PY3:
+ import html as cgi
+ else:
+ import cgi
import itertools
import pkg.actions as actions
import pkg.query_parser as qp
import pkg.server.api_errors as api_errors
import pkg.version as version
import re
- import urllib
- import urlparse
+ from six.moves.urllib.parse import urlencode, quote, urlparse, urlunparse
%>\
<%inherit file="layout.shtml"/>\
<%page args="g_vars"/>\
@@ -119,7 +123,7 @@
query_error = str(e)
except Exception as e:
results = None
- query_error = urllib.quote(str(e))
+ query_error = quote(str(e))
# Before showing the results, the type of results being shown has to be
# determined since the user might have overridden the return_type
@@ -177,7 +181,7 @@
# starting range of the previous page of search results set.
uri = request.url(qs=request.query_string, relative=True)
- scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri)
+ scheme, netloc, path, params, query, fragment = urlparse(uri)
nparams = []
- for name, val in request.params.iteritems():
+ for name, val in six.iteritems(request.params):
@@ -191,8 +195,8 @@
start = 0
nparams.append(("start", start))
- qs = urllib.urlencode(nparams)
- uri = urlparse.urlunparse((scheme, netloc, path, params, qs, fragment))
+ qs = urlencode(nparams)
+ uri = urlunparse((scheme, netloc, path, params, qs, fragment))
return uri
%></%def>\
@@ -201,7 +205,7 @@
# starting range of the next page of search results set.
uri = request.url(qs=request.query_string, relative=True)
- scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri)
+ scheme, netloc, path, params, query, fragment = urlparse(uri)
nparams = []
- for name, val in request.params.iteritems():
+ for name, val in six.iteritems(request.params):
@@ -212,8 +216,8 @@
start = criteria["start"]
nparams.append(("start", (start + result_count - 1)))
- qs = urllib.urlencode(nparams)
- uri = urlparse.urlunparse((scheme, netloc, path, params, qs, fragment))
+ qs = urlencode(nparams)
+ uri = urlunparse((scheme, netloc, path, params, qs, fragment))
return uri
%></%def>\
@@ -307,7 +311,7 @@
uri = "/depot/{0}/{1}/{2}".format(http_depot,
lang, uri)
scheme, netloc, path, params, query, \
- fragment = urlparse.urlparse(uri)
+ fragment = urlparse(uri)
nparams = []
- for name, val in request.params.iteritems():
+ for name, val in six.iteritems(request.params):
@@ -319,8 +323,8 @@
if query_error:
nparams.append(("qe", query_error))
- qs = urllib.urlencode(nparams)
- uri = urlparse.urlunparse((scheme, netloc, path, params,
+ qs = urlencode(nparams)
+ uri = urlunparse((scheme, netloc, path, params,
qs, fragment))
raise api_errors.RedirectException(uri)
@@ -374,11 +378,11 @@
pfmri_uri = pfmri.get_fmri(anarchy=True,
include_scheme=False)
phref = self.shared.rpath(g_vars, "info/0/{0}".format(
- urllib.quote(pfmri_uri, "")))
+ quote(pfmri_uri, "")))
# XXX the .p5i extension is a bogus hack because
# packagemanager requires it and shouldn't.
p5ihref = self.shared.rpath(g_vars,
- "p5i/0/{0}.p5i".format(urllib.quote(stem, "")))
+ "p5i/0/{0}.p5i".format(quote(stem, "")))
mhref = self.shared.rpath(g_vars,
"manifest/0/{0}".format(pfmri.get_url_path()))
%>\
@@ -435,7 +439,7 @@
pfmri_str = pfmri.get_fmri(anarchy=True,
include_scheme=False)
phref = self.shared.rpath(g_vars, "info/0/{0}".format(
- urllib.quote(pfmri_str, "")))
+ quote(pfmri_str, "")))
%>\
<tr${rclass}>
<td>${index | h}</td>