15737136 want ability to copy packages to a different publisher with pkgrecv
15701152 make pkgmogrify into a module
21795733 pkgmogrify include directive does not search files from current directory
--- a/src/man/pkgrecv.1 Tue Oct 06 15:15:19 2015 -0700
+++ b/src/man/pkgrecv.1 Thu Oct 08 05:30:35 2015 +0000
@@ -7,11 +7,11 @@
<refentry id="pkgrecv-1">
<refmeta><refentrytitle>pkgrecv</refentrytitle><manvolnum>1</manvolnum>
-<refmiscinfo class="date">21 Nov 2013</refmiscinfo>
+<refmiscinfo class="date">08 Oct 2015</refmiscinfo>
<refmiscinfo class="sectdesc">&man1;</refmiscinfo>
<refmiscinfo class="software">&release;</refmiscinfo>
<refmiscinfo class="arch">generic</refmiscinfo>
-<refmiscinfo class="copyright">Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.</refmiscinfo>
+<refmiscinfo class="copyright">Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.</refmiscinfo>
</refmeta>
<refnamediv>
<refname>pkgrecv</refname>
@@ -20,7 +20,7 @@
<refsynopsisdiv><title></title>
<synopsis>/usr/bin/pkgrecv [-aknrv] [-s (<replaceable>src_path</replaceable>|<replaceable>src_uri</replaceable>)]
[-d (<replaceable>dest_path</replaceable>|<replaceable>dest_uri</replaceable>)] [-c <replaceable>cache_dir</replaceable>]
- [-m <replaceable>match</replaceable>] [--raw]
+ [-m <replaceable>match</replaceable>] [--mog-file <replaceable>file_path</replaceable> ...] [--raw]
[--key <replaceable>src_key</replaceable> --cert <replaceable>src_cert</replaceable>]
[--dkey <replaceable>dest_key</replaceable> --dcert <replaceable>dest_cert</replaceable>]
(<replaceable>fmri</replaceable>|<replaceable>pattern</replaceable>) ...</synopsis>
@@ -197,6 +197,17 @@
operation.</para>
</listitem>
</varlistentry>
+<varlistentry><term><option>-mog-file</option></term>
+<listitem><para>Specifies a file containing pkgmogrify(1) transforms to be
+applied to the manifest of each package before it is copied to the destination
+package repository. '-' can be specified to use stdin as input. If both files
+and '-' are specified together, '-' will be ignored and only the specified
+files are used. Use of this option will automatically cause any existing package
+signatures to be dropped from the package. Adding new hashable actions
+such as files and licenses is not allowed. This option can be specified
+multiple times. This option may not be combined with --clone.</para>
+</listitem>
+</varlistentry>
<varlistentry><term><option>-newest</option></term>
<listitem><para>List the most recent versions of the packages available from
the repository specified by the <option>s</option> option. All other options
@@ -298,6 +309,13 @@
located at <filename>/export/repo</filename>.</para>
<screen>$ <userinput>pkgrecv -s /my/archive.p5p -d /export/repo '*'</userinput></screen>
</example>
+<example><title>Change publisher name</title>
+<para>Change the publisher name of the package 'foo' and all its
+dependencies to 'extra' during republishing.</para>
+<screen>$ <userinput>echo '<transform set name=pkg.fmri -> edit value
+(pkg://).*?(/.*) \\\1extra\\\2>' | pkgrecv -s repo1 -d repo2 --mog-file - foo
+</userinput></screen>
+</example>
</refsect1>
<refsect1 role="environment-variables"><title></title>
<para>The following environment variables are supported:</para>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/mogrify.py Thu Oct 08 05:30:35 2015 +0000
@@ -0,0 +1,765 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+
+#
+# Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+#
+
+
+from __future__ import print_function
+import os
+import re
+import shlex
+import six
+import sys
+
+import pkg.actions
+
+
+def add_transform(transforms, printinfo, transform, filename, lineno):
+ """This routine adds a transform tuple to the list used
+ to process actions."""
+
+ # strip off transform
+ s = transform[10:]
+ # make error messages familiar
+ transform = "<" + transform + ">"
+
+ try:
+ index = s.index("->")
+ except ValueError:
+ raise RuntimeError(_("Missing -> in transform"))
+ matching = s[0:index].strip().split()
+ types = [a for a in matching if "=" not in a]
+ attrdict = pkg.actions.attrsfromstr(" ".join([a for a in matching if "=" in a]))
+
+ for a in attrdict:
+ try:
+ attrdict[a] = re.compile(attrdict[a])
+ except re.error as e:
+ raise RuntimeError(
+ _("transform ({transform}) has regexp error "
+ "({err}) in matching clause"
+ ).format(transform=transform, err=e))
+
+ op = s[index+2:].strip().split(None, 1)
+
+ # use closures to encapsulate desired operation
+
+ if op[0] == "drop":
+ if len(op) > 1:
+ raise RuntimeError(
+ _("transform ({0}) has 'drop' operation syntax error"
+ ).format(transform))
+ operation = lambda a, m, p, f, l: None
+
+ elif op[0] == "set":
+ try:
+ attr, value = shlex.split(op[1])
+ except ValueError:
+ raise RuntimeError(
+ _("transform ({0}) has 'set' operation syntax error"
+ ).format(transform))
+ def set_func(action, matches, pkg_attrs, filename, lineno):
+ newattr = substitute_values(attr, action, matches,
+ pkg_attrs, filename, lineno)
+ newval = substitute_values(value, action, matches,
+ pkg_attrs, filename, lineno)
+ if newattr == "action.hash":
+ if hasattr(action, "hash"):
+ action.hash = newval
+ else:
+ action.attrs[newattr] = newval
+ return action
+ operation = set_func
+
+ elif op[0] == "default":
+ try:
+ attr, value = shlex.split(op[1])
+ except ValueError:
+ raise RuntimeError(
+ _("transform ({0}) has 'default' operation syntax error"
+ ).format(transform))
+
+ def default_func(action, matches, pkg_attrs, filename, lineno):
+ newattr = substitute_values(attr, action, matches,
+ pkg_attrs, filename, lineno)
+ if newattr not in action.attrs:
+ newval = substitute_values(value, action,
+ matches, pkg_attrs, filename, lineno)
+ action.attrs[newattr] = newval
+ return action
+ operation = default_func
+
+ elif op[0] == "abort":
+ if len(op) > 1:
+ raise RuntimeError(_("transform ({0}) has 'abort' "
+ "operation syntax error").format(transform))
+
+ def abort_func(action, matches, pkg_attrs, filename, lineno):
+ sys.exit(0)
+
+ operation = abort_func
+
+ elif op[0] == "exit":
+ exitval = 0
+ msg = None
+
+ if len(op) == 2:
+ args = op[1].split(None, 1)
+ try:
+ exitval = int(args[0])
+ except ValueError:
+ raise RuntimeError(_("transform ({0}) has 'exit' "
+ "operation syntax error: illegal exit value").format(
+ transform))
+ if len(args) == 2:
+ msg = args[1]
+
+ def exit_func(action, matches, pkg_attrs, filename, lineno):
+ if msg:
+ newmsg = substitute_values(msg, action,
+ matches, pkg_attrs, filename, lineno,
+ quote=True)
+ print(newmsg, file=sys.stderr)
+ sys.exit(exitval)
+
+ operation = exit_func
+
+ elif op[0] == "add":
+ try:
+ attr, value = shlex.split(op[1])
+ except ValueError:
+ raise RuntimeError(
+ _("transform ({0}) has 'add' operation syntax error"
+ ).format(transform))
+
+ def add_func(action, matches, pkg_attrs, filename, lineno):
+ newattr = substitute_values(attr, action, matches,
+ pkg_attrs, filename, lineno)
+ newval = substitute_values(value, action, matches,
+ pkg_attrs, filename, lineno)
+ if newattr in action.attrs:
+ av = action.attrs[newattr]
+ if isinstance(av, list):
+ action.attrs[newattr].append(newval)
+ else:
+ action.attrs[newattr] = [ av, newval ]
+ else:
+ action.attrs[newattr] = newval
+ return action
+ operation = add_func
+
+ elif op[0] == "edit":
+ if len(op) < 2:
+ raise RuntimeError(
+ _("transform ({0}) has 'edit' operation syntax error"
+ ).format(transform))
+
+ args = shlex.split(op[1])
+ if len(args) not in [2, 3]:
+ raise RuntimeError(
+ _("transform ({0}) has 'edit' operation syntax error"
+ ).format(transform))
+ attr = args[0]
+
+ # Run args[1] (the regexp) through substitute_values() with a
+ # bunch of bogus values to see whether it triggers certain
+ # exceptions. If it does, then substitution would have
+ # occurred, and we can't compile the regex now, but wait until
+ # we can correctly run substitute_values().
+ try:
+ substitute_values(args[1], None, [], None, None, None)
+ regexp = re.compile(args[1])
+ except (AttributeError, RuntimeError):
+ regexp = args[1]
+ except re.error as e:
+ raise RuntimeError(
+ _("transform ({transform}) has 'edit' operation "
+ "with malformed regexp ({err})").format(
+ transform=transform, err=e))
+
+ if len(args) == 3:
+ replace = args[2]
+ else:
+ replace = ""
+
+ def replace_func(action, matches, pkg_attrs, filename, lineno):
+ newattr = substitute_values(attr, action, matches,
+ pkg_attrs, filename, lineno)
+ newrep = substitute_values(replace, action, matches,
+ pkg_attrs, filename, lineno)
+ val = attrval_as_list(action.attrs, newattr)
+
+ if not val:
+ return action
+
+ # It's now appropriate to compile the regexp, if there
+ # are substitutions to be made. So do the substitution
+ # and compile the result.
+ if isinstance(regexp, six.string_types):
+ rx = re.compile(substitute_values(regexp,
+ action, matches, pkg_attrs, filename, lineno))
+ else:
+ rx = regexp
+
+ try:
+ action.attrs[newattr] = [
+ rx.sub(newrep, v)
+ for v in val
+ ]
+ except re.error as e:
+ raise RuntimeError(
+ _("transform ({transform}) has edit "
+ "operation with replacement string regexp "
+ "error {err}").format(
+ transform=transform, err=e))
+ return action
+
+ operation = replace_func
+
+ elif op[0] == "delete":
+ if len(op) < 2:
+ raise RuntimeError(
+ _("transform ({0}) has 'delete' operation syntax error"
+ ).format(transform))
+
+ args = shlex.split(op[1])
+ if len(args) != 2:
+ raise RuntimeError(
+ _("transform ({0}) has 'delete' operation syntax error"
+ ).format(transform))
+ attr = args[0]
+
+ try:
+ regexp = re.compile(args[1])
+ except re.error as e:
+ raise RuntimeError(
+ _("transform ({transform}) has 'delete' operation"
+ "with malformed regexp ({err})").format(
+ transform=transform, err=e))
+
+ def delete_func(action, matches, pkg_attrs, filename, lineno):
+ val = attrval_as_list(action.attrs, attr)
+ if not val:
+ return action
+ try:
+ new_val = [
+ v
+ for v in val
+ if not regexp.search(v)
+ ]
+
+ if new_val:
+ action.attrs[attr] = new_val
+ else:
+ del action.attrs[attr]
+ except re.error as e:
+ raise RuntimeError(
+ _("transform ({transform}) has delete "
+ "operation with replacement string regexp "
+ "error {err}").format(
+ transform=transform, err=e))
+ return action
+
+ operation = delete_func
+
+ elif op[0] == "print":
+ if len(op) > 2:
+ raise RuntimeError(_("transform ({0}) has 'print' "
+ "operation syntax error").format(transform))
+
+ if len(op) == 1:
+ msg = ""
+ else:
+ msg = op[1]
+
+ def print_func(action, matches, pkg_attrs, filename, lineno):
+ newmsg = substitute_values(msg, action, matches,
+ pkg_attrs, filename, lineno, quote=True)
+
+ printinfo.append("{0}".format(newmsg))
+ return action
+
+ operation = print_func
+
+ elif op[0] == "emit":
+ if len(op) > 2:
+ raise RuntimeError(_("transform ({0}) has 'emit' "
+ "operation syntax error").format(transform))
+
+ if len(op) == 1:
+ msg = ""
+ else:
+ msg = op[1]
+
+ def emit_func(action, matches, pkg_attrs, filename, lineno):
+ newmsg = substitute_values(msg, action, matches,
+ pkg_attrs, filename, lineno, quote=True)
+
+ if not newmsg.strip() or newmsg.strip()[0] == "#":
+ return (newmsg, action)
+ try:
+ return (pkg.actions.fromstr(newmsg), action)
+ except (pkg.actions.MalformedActionError,
+ pkg.actions.UnknownActionError,
+ pkg.actions.InvalidActionError) as e:
+ raise RuntimeError(e)
+
+ operation = emit_func
+
+ else:
+ raise RuntimeError(_("unknown transform operation '{0}'").format(op[0]))
+
+ transforms.append((types, attrdict, operation, filename, lineno, transform))
+
+def substitute_values(msg, action, matches, pkg_attrs, filename=None, lineno=None, quote=False):
+ """Substitute tokens in messages which can be expanded to the action's
+ attribute values."""
+
+ newmsg = ""
+ prevend = 0
+ for i in re.finditer("%\((.+?)\)|%\{(.+?)\}", msg):
+ m = i.string[slice(*i.span())]
+ assert m[1] in "({"
+ if m[1] == "(":
+ group = 1
+ elif m[1] == "{":
+ group = 2
+ d = {}
+ if ";" in i.group(group):
+ attrname, args = i.group(group).split(";", 1)
+ tokstream = shlex.shlex(args)
+ for tok in tokstream:
+ if tok == ";":
+ tok = tokstream.get_token()
+ eq = tokstream.get_token()
+ if eq == "" or eq == ";":
+ val = True
+ else:
+ assert(eq == "=")
+ val = tokstream.get_token()
+ if ('"', '"') == (val[0], val[-1]):
+ val = val[1:-1]
+ elif ("'", "'") == (val[0], val[-1]):
+ val = val[1:-1]
+ d[tok] = val
+ else:
+ attrname = i.group(group)
+
+ d.setdefault("quote", quote)
+
+ if d.get("noquote", None):
+ d["quote"] = False
+
+ if group == 2:
+ attr = pkg_attrs.get(attrname, d.get("notfound", None))
+ if attr and len(attr) == 1:
+ attr = attr[0]
+ else:
+ if attrname == "pkg.manifest.lineno":
+ attr = str(lineno)
+ elif attrname == "pkg.manifest.filename":
+ attr = str(filename)
+ elif attrname == "action.hash":
+ attr = getattr(action, "hash",
+ d.get("notfound", None))
+ elif attrname == "action.key":
+ attr = action.attrs.get(action.key_attr,
+ d.get("notfound", None))
+ elif attrname == "action.name":
+ attr = action.name
+ else:
+ attr = action.attrs.get(attrname,
+ d.get("notfound", None))
+
+ if attr is None:
+ raise RuntimeError(_("attribute '{0}' not found").format(
+ attrname))
+
+ def q(s):
+ if " " in s or "'" in s or "\"" in s or s == "":
+ if "\"" not in s:
+ return '"{0}"'.format(s)
+ elif "'" not in s:
+ return "'{0}'".format(s)
+ else:
+ return '"{0}"'.format(s.replace("\"", "\\\""))
+ else:
+ return s
+
+ if not d["quote"]:
+ q = lambda x: x
+
+ if isinstance(attr, six.string_types):
+ newmsg += msg[prevend:i.start()] + \
+ d.get("prefix", "") + q(attr) + d.get("suffix", "")
+ else:
+ newmsg += msg[prevend:i.start()] + \
+ d.get("sep", " ").join([
+ d.get("prefix", "") + q(v) + d.get("suffix", "")
+ for v in attr
+ ])
+ prevend = i.end()
+
+ newmsg += msg[prevend:]
+
+ # Now see if there are any backreferences to match groups
+ msg = newmsg
+ newmsg = ""
+ prevend = 0
+ backrefs = sum((
+ group
+ for group in (
+ match.groups()
+ for match in matches
+ if match.groups()
+ )
+ ), (None,))
+ for i in re.finditer(r"%<\d>", msg):
+ ref = int(i.string[slice(*i.span())][2:-1])
+
+ if ref == 0 or ref > len(backrefs) - 1:
+ raise RuntimeError(_("no match group {group:d} "
+ "(max {maxgroups:d})").format(
+ group=ref, maxgroups=len(backrefs) - 1))
+ if backrefs[ref] is None:
+ raise RuntimeError(_("Error\nInvalid backreference: "
+ "%<{ref}> refers to an unmatched string"
+ ).format(ref=ref))
+ newmsg += msg[prevend:i.start()] + backrefs[ref]
+ prevend = i.end()
+
+ newmsg += msg[prevend:]
+ return newmsg
+
+def attrval_as_list(attrdict, key):
+ """Return specified attribute as list;
+ an empty list if no such attribute exists"""
+ if key not in attrdict:
+ return []
+ val = attrdict[key]
+ if not isinstance(val, list):
+ val = [val]
+ return val
+
+class PkgAction(pkg.actions.generic.Action):
+ name = "pkg"
+ def __init__(self, attrs):
+ self.attrs = attrs
+
+def apply_transforms(transforms, action, pkg_attrs, verbose, act_filename,
+ act_lineno):
+ """Apply all transforms to action, returning modified action
+ or None if action is dropped"""
+ comments = []
+ newactions = []
+ if verbose:
+ comments.append("# Action: {0}".format(action))
+ for types, attrdict, operation, filename, lineno, transform in transforms:
+ if action is None:
+ action = PkgAction(pkg_attrs)
+ # skip if types are specified and none match
+ if types and action.name not in types:
+ continue
+ # skip if some attrs don't exist
+ if set(attrdict.keys()) - set(action.attrs.keys()):
+ continue
+
+ # Check to make sure all matching attrs actually match. The
+ # order is effectively arbitrary, since they come from a dict.
+ matches = [
+ attrdict[key].match(attrval)
+ for key in attrdict
+ for attrval in attrval_as_list(action.attrs, key)
+ ]
+
+ if not all(matches):
+ continue
+
+ s = transform[11:transform.index("->")]
+ # Map each pattern to its position in the original match string.
+ matchorder = {}
+ for attr, match in six.iteritems(attrdict):
+ # Attributes might be quoted even if they don't need it,
+ # and lead to a mis-match. These three patterns are all
+ # safe to try. If we fail to find the match expression,
+ # it's probably because it used different quoting rules
+ # than the action code does, or from these three rules.
+ # It might very well be okay, so we go ahead, but these
+ # oddly quoted patterns will sort at the beginning, and
+ # backref matching may be off.
+ matchorder[match.pattern] = -1
+ for qs in ("{0}={1}", "{0}=\"{1}\"", "{0}='{1}'"):
+ pos = s.find(qs.format(attr, match.pattern))
+ if pos != -1:
+ matchorder[match.pattern] = pos
+ break
+
+ # Then sort the matches list by those positions.
+ matches.sort(key=lambda x: matchorder[x.re.pattern])
+
+ # time to apply transform operation
+ try:
+ if verbose:
+ orig_attrs = action.attrs.copy()
+ action = operation(action, matches, pkg_attrs,
+ act_filename, act_lineno)
+ except RuntimeError as e:
+ raise RuntimeError("Transform specified in file {0}, line {1} reports {2}".format(
+ filename, lineno, e))
+ if isinstance(action, tuple):
+ newactions.append(action[0])
+ action = action[1]
+ if verbose:
+ if not action or \
+ not isinstance(action, six.string_types) and \
+ orig_attrs != action.attrs:
+ comments.append("# Applied: {0} (file {1} line {2})".format(
+ transform, filename, lineno))
+ comments.append("# Result: {0}".format(action))
+ if not action or isinstance(action, six.string_types):
+ break
+
+ # Any newly-created actions need to have the transforms applied, too.
+ newnewactions = []
+ for act in newactions:
+ if not isinstance(act, six.string_types):
+ c, al = apply_transforms(transforms, act, pkg_attrs,
+ verbose, act_filename, act_lineno)
+ if c:
+ comments.append(c)
+ newnewactions += [a for a in al if a is not None]
+ else:
+ newnewactions.append(act)
+
+ if len(comments) == 1:
+ comments = []
+
+ if action and action.name != "pkg":
+ return (comments, [action] + newnewactions)
+ else:
+ return (comments, [None] + newnewactions)
+
+
+def searching_open(filename, includes, try_cwd=False):
+ """ implement include hierarchy """
+
+ if filename.startswith("/") or try_cwd == True and \
+ os.path.exists(filename):
+ try:
+ return filename, open(filename)
+ except IOError as e:
+ raise RuntimeError(_("Cannot open file: {0}").format(e))
+
+ for i in includes:
+ f = os.path.join(i, filename)
+ if os.path.exists(f):
+ try:
+ return f, open(f)
+ except IOError as e:
+ raise RuntimeError(_("Cannot open file: {0}").format(e))
+
+ raise RuntimeError(_("File not found: \'{0}\'").format(filename))
+
+def apply_macros(s, macros):
+ """Apply macro subs defined on command line... keep applying
+ macros until no translations are found."""
+ while s and "$(" in s:
+ for key in macros.keys():
+ if key in s:
+ value = macros[key]
+ s = s.replace(key, value)
+ break # look for more substitutions
+ else:
+ break # no more substitutable tokens
+ return s
+
+def read_file(tp, ignoreincludes, transforms, macros, printinfo, includes,
+ error_print_cb=None):
+ """ return the lines in the file as a list of tuples containing
+ (line, filename, line number); handle continuation and <include "path">
+ """
+ ret = []
+ filename, f = tp
+
+ accumulate = ""
+ for lineno, line in enumerate(f):
+ lineno = lineno + 1 # number from 1
+ line = line.strip()
+ if not line: # preserve blanks
+ ret.append((line, filename, lineno))
+ continue
+ if line.endswith("\\"):
+ accumulate += line[0:-1]
+ continue
+ elif accumulate:
+ line = accumulate + line
+ accumulate = ""
+
+ if line:
+ line = apply_macros(line, macros)
+
+ line = line.strip()
+
+ if not line:
+ continue
+
+ try:
+ if line.startswith("<") and line.endswith(">"):
+ if line.startswith("<include"):
+ if not ignoreincludes:
+ line = line[1:-1]
+ line = line[7:].strip()
+ line = line.strip('"')
+ ret.extend(read_file(
+ searching_open(line, includes,
+ try_cwd=True),
+ ignoreincludes,
+ transforms, macros,
+ printinfo, includes,
+ error_print_cb))
+ else:
+ ret.append((line, filename, lineno))
+ elif line.startswith("<transform"):
+ line = line[1:-1]
+ add_transform(transforms, printinfo,
+ line, filename, lineno)
+ else:
+ raise RuntimeError(
+ _("unknown command {0}").format(
+ line))
+ else:
+ ret.append((line, filename, lineno))
+ except RuntimeError as e:
+ if error_print_cb:
+ error_print_cb(_("File {file}, line {line:d}: "
+ "{exception}").format(file=filename,
+ line=lineno,
+ exception=e),
+ exitcode=None)
+ raise RuntimeError("<included from>")
+
+ return ret
+
+def process_error(msg, error_cb=None):
+ """Print the error message or raise the actual exception if no
+ error printing callback specified."""
+
+ if error_cb:
+ error_cb(msg)
+ else:
+ raise
+
+def process_mog(file_args, ignoreincludes, verbose, includes, macros,
+ printinfo, output, error_cb=None, sys_supply_files=[]):
+ """Entry point for mogrify logic.
+ file_args: input files to be mogrified. If not provided, use stdin
+ instead.
+
+        ignoreincludes: whether to ignore <include ...> directives in input
+ files.
+
+ verbose: whether to include verbose action processing information
+ in mogrify output. Useful for debug.
+
+ includes: a list of directory paths used for searching include files.
+
+ macros: a list of macros for substitution.
+
+        printinfo: used to collect a list of print info during processing.
+        Could be empty initially.
+
+ output: used to collect mogrify output. Empty initially.
+
+        error_cb: used to supply an error printing callback.
+
+ sys_supply_files: used for other systems or modules to supply
+ additional input files.
+ """
+
+ transforms = []
+ try:
+ if file_args:
+ infiles = [ searching_open(f, includes,
+ try_cwd=True) for f in file_args ]
+ else:
+ infiles = [("<stdin>", sys.stdin)]
+ if sys_supply_files:
+ infiles.extend([searching_open(f, includes,
+ try_cwd=True) for f in sys_supply_files])
+ except RuntimeError as e:
+ process_error(_("Error processing input arguments: {0}"
+ ).format(e), error_cb)
+
+ try:
+ lines = []
+ for f in infiles:
+ lines.extend(read_file(f, ignoreincludes,
+ transforms, macros, printinfo, includes, error_cb))
+ lines.append((None, f[0], None))
+ except RuntimeError as e:
+ raise
+
+ pkg_attrs = {}
+ for line, filename, lineno in lines:
+ if line is None:
+ if "pkg.fmri" in pkg_attrs:
+ comment, a = apply_transforms(transforms,
+ None, pkg_attrs,
+ verbose, filename, lineno)
+ output.append((comment, a, None))
+ pkg_attrs = {}
+ continue
+
+ if not line or line.startswith("#") or line.startswith("<"):
+ output.append(([line], [], None))
+ continue
+
+ if line.startswith("$("): #prepended unexpanded macro
+ # doesn't handle nested macros
+ eom = line.index(")") + 1
+ prepended_macro = line[0:eom]
+ line = line[eom:]
+ else:
+ prepended_macro = None
+
+ try:
+ act = pkg.actions.fromstr(line)
+ except (pkg.actions.MalformedActionError,
+ pkg.actions.UnknownActionError,
+ pkg.actions.InvalidActionError) as e:
+ process_error("File {0} line {1:d}: {2}".format(
+ filename, lineno, e), error_cb)
+ try:
+ if act.name == "set":
+ name = act.attrs["name"]
+ value = act.attrs["value"]
+ if isinstance(value, six.string_types):
+ pkg_attrs.setdefault(name, []).append(value)
+ else:
+ pkg_attrs.setdefault(name, []).extend(value)
+ comment, a = apply_transforms(transforms, act,
+ pkg_attrs, verbose, filename, lineno)
+ output.append((comment, a, prepended_macro))
+ except RuntimeError as e:
+ process_error("File {0} line {1:d}: {2}".format(
+ filename, lineno, e), error_cb)
--- a/src/pkg/manifests/package:pkg.p5m Tue Oct 06 15:15:19 2015 -0700
+++ b/src/pkg/manifests/package:pkg.p5m Thu Oct 08 05:30:35 2015 +0000
@@ -181,6 +181,7 @@
file path=$(PYDIRVP)/pkg/manifest.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/mediator.py
file path=$(PYDIRVP)/pkg/misc.py pkg.depend.bypass-generate=.*six.*
+file path=$(PYDIRVP)/pkg/mogrify.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/nrlock.py
file path=$(PYDIRVP)/pkg/p5i.py pkg.depend.bypass-generate=.*six.*
file path=$(PYDIRVP)/pkg/p5p.py pkg.depend.bypass-generate=.*six.*
--- a/src/po/POTFILES.in Tue Oct 06 15:15:19 2015 -0700
+++ b/src/po/POTFILES.in Thu Oct 08 05:30:35 2015 +0000
@@ -57,6 +57,7 @@
modules/manifest.py
modules/mediator.py
modules/misc.py
+modules/mogrify.py
modules/p5i.py
modules/p5p.py
modules/p5s.py
--- a/src/pull.py Tue Oct 06 15:15:19 2015 -0700
+++ b/src/pull.py Thu Oct 08 05:30:35 2015 +0000
@@ -24,6 +24,7 @@
# Copyright (c) 2008, 2015, Oracle and/or its affiliates. All rights reserved.
#
+from __future__ import print_function
import calendar
import errno
import getopt
@@ -36,6 +37,7 @@
import traceback
import warnings
+import pkg.actions as actions
import pkg.catalog as catalog
import pkg.client.progress as progress
import pkg.fmri
@@ -45,6 +47,7 @@
import pkg.client.publisher as publisher
import pkg.client.transport.transport as transport
import pkg.misc as misc
+import pkg.mogrify as mog
import pkg.p5p
import pkg.pkgsubprocess as subprocess
import pkg.publish.transaction as trans
@@ -95,7 +98,8 @@
msg(_("""\
Usage:
pkgrecv [-aknrv] [-s src_uri] [-d (path|dest_uri)] [-c cache_dir]
- [-m match] [--raw] [--key src_key --cert src_cert]
+ [-m match] [--mog-file file_path ...] [--raw]
+ [--key src_key --cert src_cert]
[--dkey dest_key --dcert dest_cert]
(fmri|pattern) ...
pkgrecv [-s src_repo_uri] --newest
@@ -157,6 +161,13 @@
Cloning will leave the destination repository altered in
case of an error.
+ --mog-file Specifies the path to a file containing pkgmogrify(1)
+ transforms to be applied to every package before it is
+ copied to the destination. A path of '-' can be
+ specified to use stdin. This option can be specified
+ multiple times. This option can not be combined with
+ --clone.
+
--newest List the most recent versions of the packages available
from the specified repository and exit. (All other
options except -s will be ignored.)
@@ -392,6 +403,7 @@
cert = None
dkey = None
dcert = None
+ mog_files = []
publishers = []
clone = False
verbose = False
@@ -410,8 +422,8 @@
try:
opts, pargs = getopt.getopt(sys.argv[1:], "ac:D:d:hkm:np:rs:v",
- ["cert=", "key=", "dcert=", "dkey=", "newest", "raw",
- "debug=", "clone"])
+ ["cert=", "key=", "dcert=", "dkey=", "mog-file=", "newest",
+ "raw", "debug=", "clone"])
except getopt.GetoptError as e:
usage(_("Illegal option -- {0}").format(e.opt))
@@ -425,7 +437,7 @@
elif opt == "-d":
target = arg
elif opt == "-D" or opt == "--debug":
- if arg in ["plan", "transport"]:
+ if arg in ["plan", "transport", "mogrify"]:
key = arg
value = "True"
else:
@@ -463,6 +475,8 @@
src_uri = arg
elif opt == "-v":
verbose = True
+ elif opt == "--mog-file":
+ mog_files.append(arg)
elif opt == "--newest":
list_newest = True
elif opt == "--raw":
@@ -512,6 +526,9 @@
if publishers and not clone:
usage(_("-p can only be used with --clone.\n"))
+ if mog_files and clone:
+ usage(_("--mog-file can not be used with --clone.\n"))
+
incoming_dir = tempfile.mkdtemp(dir=temp_root,
prefix=global_settings.client_name + "-")
tmpdirs.append(incoming_dir)
@@ -541,6 +558,7 @@
args += (publishers,)
return clone_repo(*args)
+ args += (mog_files,)
if archive:
# Retrieving package data for archival requires a different mode
# of operation so gets its own routine. Notably, it requires
@@ -601,13 +619,129 @@
return matches
+def __mog_helper(mog_files, fmri, mpathname):
+ """Helper routine for mogrifying manifest. Precondition: mog_files
+ has at least one element."""
+
+ ignoreincludes = False
+ mog_verbose = False
+ includes = []
+ macros = {}
+ printinfo = []
+ output = []
+ line_buffer = []
+
+ # Set mogrify in verbose mode for debugging.
+ if DebugValues.get_value("mogrify"):
+ mog_verbose = True
+
+        # Filter out the "-" symbol. If the only element given is "-",
+        # input_files will be empty and stdin is used. If real files are
+        # supplied along with "-", the "-" is ignored and input is taken
+        # only from those files.
+ input_files = [mf for mf in mog_files if mf != "-"]
+ mog.process_mog(input_files, ignoreincludes, mog_verbose, includes,
+ macros, printinfo, output, error_cb=None,
+ sys_supply_files=[mpathname])
+
+ try:
+ for p in printinfo:
+ print("{0}".format(p), file=sys.stdout)
+ except IOError as e:
+ error(_("Cannot write extra data {0}").format(e))
+
+ # Collect new contents of mogrified manifest.
+ # emitted tracks output so far to avoid duplicates.
+ emitted = set()
+ for comment, actionlist, prepended_macro in output:
+ if comment:
+ for l in comment:
+ line_buffer.append("{0}"
+ .format(l))
+
+ for i, action in enumerate(actionlist):
+ if action is None:
+ continue
+ if prepended_macro is None:
+ s = "{0}".format(action)
+ else:
+ s = "{0}{1}".format(
+ prepended_macro, action)
+ # The first action is the original
+ # action and should be collected;
+ # later actions are all emitted and
+ # should only be collected if not
+ # duplicates.
+ if i == 0:
+ line_buffer.append(s)
+ elif s not in emitted:
+ line_buffer.append(s)
+ emitted.add(s)
+
+ # Print the mogrified result for debugging purpose.
+ if mog_verbose:
+ print("{0}".format("Mogrified manifest for {0}: (subject to "
+ "validation)\n".format(fmri.get_fmri(anarchy=True,
+ include_scheme=False))), file=sys.stdout)
+ for line in line_buffer:
+ print("{0}".format(line), file=sys.stdout)
+
+ # Find the mogrified fmri. Make it equal to the old fmri first just
+ # to make sure it always has a value.
+ nfmri = fmri
+ new_lines = []
+ for al in line_buffer:
+ if not al.strip():
+ continue
+
+ if al.strip().startswith("#"):
+ continue
+ try:
+ act = actions.fromstr(al)
+ except Exception as e:
+                        # If any exception is encountered here, it means the
+                        # action was corrupted by mogrify.
+ abort(e)
+ if act.name == "set" and act.attrs["name"] == "pkg.fmri":
+ # Construct mogrified new fmri.
+ try:
+ nfmri = pkg.fmri.PkgFmri(
+ act.attrs["value"])
+ except Exception as ex:
+ abort("Invalid FMRI for set action:\n{0}"
+ .format(al))
+ if hasattr(act, "hash"):
+ # Drop the signature.
+ if act.name == "signature":
+ continue
+                        # Check whether new content such as files or licenses
+                        # was added via mogrify. This should not be allowed.
+ if "pkg.size" not in act.attrs:
+ abort("Adding new hashable content {0} is not "
+ "allowed.".format(act.hash))
+ elif act.name == "depend":
+ try:
+ fmris = act.attrs["fmri"]
+ if not isinstance(fmris, list):
+ fmris = [fmris]
+ for f in fmris:
+ pkg.fmri.PkgFmri(f)
+ except Exception as ex:
+ abort("Invalid FMRI(s) for depend action:\n{0}"
+ .format(al))
+ new_lines.append(al)
+
+ return (nfmri, new_lines)
+
def archive_pkgs(pargs, target, list_newest, all_versions, all_timestamps,
keep_compresed, raw, recursive, dry_run, verbose, dest_xport_cfg, src_uri,
- dkey, dcert):
+ dkey, dcert, mog_files):
"""Retrieve source package data completely and then archive it."""
global cache_dir, download_start, xport, xport_cfg
-
+ do_mog = False
+ if mog_files:
+ do_mog = True
target = os.path.abspath(target)
if os.path.exists(target):
error(_("Target archive '{0}' already "
@@ -661,6 +795,7 @@
tracker.manifest_fetch_start(npkgs)
+ fmappings = {}
good_matches = []
for f in matches:
try:
@@ -668,8 +803,42 @@
except apx.InvalidPackageErrors as e:
invalid_manifests.extend(e.errors)
continue
- good_matches.append(f)
- getb, getf, arcb, arccb = get_sizes(m)
+
+ nf = f
+ if do_mog:
+ nf, line_buffer = __mog_helper(mog_files,
+ f, m.pathname)
+ try:
+ # Create mogrified manifest.
+ # Remove the old manifest cache first.
+ oldpkgdir = xport_cfg.get_pkg_dir(f)
+ oldpkg_parentdir = os.path.dirname(
+ oldpkgdir)
+ shutil.rmtree(oldpkgdir)
+                                # If the parent directory becomes empty,
+                                # remove it as well.
+ if not os.listdir(oldpkg_parentdir):
+ shutil.rmtree(oldpkg_parentdir)
+ nm = pkg.manifest.FactoredManifest(nf,
+ xport_cfg.get_pkg_dir(nf),
+ contents="\n".join(
+ line_buffer))
+ except EnvironmentError as e:
+ raise apx._convert_error(e)
+ except Exception as e:
+ abort(_("Creating mogrified "
+ "manifest failed: {0}"
+ ).format(str(e)))
+ else:
+ # Use the original manifest if no
+ # mogrify is done.
+ nm = m
+
+ # Store a mapping between new fmri and new manifest for
+ # future use.
+ fmappings[nf] = nm
+ good_matches.append(nf)
+ getb, getf, arcb, arccb = get_sizes(nm)
get_bytes += getb
get_files += getf
@@ -681,7 +850,7 @@
# Also include the the manifest file itself in the
# amount of bytes to archive.
try:
- fs = os.stat(m.pathname)
+ fs = os.stat(nm.pathname)
arc_bytes += fs.st_size
except EnvironmentError as e:
raise apx._convert_error(e)
@@ -713,8 +882,8 @@
s[1].rjust(rjust_value)))
msg(_("\nPackages to archive:"))
- for f in sorted(matches):
- fmri = f.get_fmri(anarchy=True,
+ for nf in sorted(matches):
+ fmri = nf.get_fmri(anarchy=True,
include_scheme=False)
msg(fmri)
msg()
@@ -729,23 +898,23 @@
total_processed = len(matches)
continue
- for f in matches:
- tracker.download_start_pkg(f)
- pkgdir = xport_cfg.get_pkg_dir(f)
+ for nf in matches:
+ tracker.download_start_pkg(nf)
+ pkgdir = xport_cfg.get_pkg_dir(nf)
mfile = xport.multi_file_ni(src_pub, pkgdir,
progtrack=tracker)
- m = get_manifest(f, xport_cfg)
- add_hashes_to_multi(m, mfile)
+ nm = fmappings[nf]
+ add_hashes_to_multi(nm, mfile)
if mfile:
download_start = True
mfile.wait_files()
if not dry_run:
- archive_list.append((f, m.pathname, pkgdir))
+ archive_list.append((nf, nm.pathname, pkgdir))
# Nothing more to do for this package.
- tracker.download_end_pkg(f)
+ tracker.download_end_pkg(nf)
total_processed += 1
tracker.download_done()
@@ -1127,7 +1296,7 @@
def transfer_pkgs(pargs, target, list_newest, all_versions, all_timestamps,
keep_compressed, raw, recursive, dry_run, verbose, dest_xport_cfg, src_uri,
- dkey, dcert):
+ dkey, dcert, mog_files):
"""Retrieve source package data and optionally republish it as each
package is retrieved.
"""
@@ -1138,6 +1307,10 @@
any_matched = []
invalid_manifests = []
total_processed = 0
+ do_mog = False
+
+ if mog_files:
+ do_mog = True
for src_pub in xport_cfg.gen_publishers():
tracker = get_tracker()
@@ -1214,10 +1387,6 @@
xport_cfg.pkg_root = basedir
dest_xport_cfg.pkg_root = basedir
- if republish:
- targ_cat = fetch_catalog(targ_pub, tracker,
- dest_xport, True)
-
matches = get_matches(src_pub, tracker, xport, pargs,
any_unmatched, any_matched, all_versions, all_timestamps,
recursive)
@@ -1245,30 +1414,106 @@
tracker.manifest_fetch_start(npkgs)
pkgs_to_get = []
+ new_targ_cats = {}
+ new_targ_pubs = {}
+ fmappings = {}
+
while matches:
f = matches.pop()
- if republish and targ_cat.get_entry(f):
- tracker.manifest_fetch_progress(completion=True)
- continue
try:
m = get_manifest(f, xport_cfg)
except apx.InvalidPackageErrors as e:
invalid_manifests.extend(e.errors)
continue
- pkgs_to_get.append(f)
+
+ nf = f
+ if do_mog:
+ nf, line_buffer = __mog_helper(mog_files,
+ f, m.pathname)
- getb, getf, sendb, sendcb = get_sizes(m)
- get_bytes += getb
- get_files += getf
+ # Figure out whether the package is already in
+ # the target repository or not.
+ if republish:
+ # Check whether the fmri already exists in the
+ # target repository.
+ if nf.publisher not in new_targ_cats:
+ newpub = transport.setup_publisher(
+ target, nf.publisher, dest_xport,
+ dest_xport_cfg, ssl_key=dkey,
+ ssl_cert=dcert)
+                                # If no publisher transport was
+                                # established, the target must be a
+                                # remote host; set remote_prefix
+                                # to True.
+ if not newpub:
+ newpub = transport.setup_publisher(
+ target, nf.publisher,
+ dest_xport, dest_xport_cfg,
+ remote_prefix=True,
+ ssl_key=dkey,
+ ssl_cert=dcert)
+ new_targ_pubs[nf.publisher] = newpub
+ newcat = fetch_catalog(newpub, tracker,
+ dest_xport, True)
+ new_targ_cats[nf.publisher] = newcat
+ if newcat.get_entry(nf):
+ tracker.manifest_fetch_progress(
+ completion=True)
+ continue
+ # If we already have a catalog in the
+ # cache, use it.
+ elif new_targ_cats[nf.publisher].get_entry(nf):
+ tracker.manifest_fetch_progress(
+ completion=True)
+ continue
+
+ if do_mog:
+                        # We have examined which package to
+                        # republish. Then we need to store the
+                        # mogrified manifest for future use.
+ try:
+ # Create mogrified manifest.
+ # Remove the old manifest cache first.
+ oldpkgdir = xport_cfg.get_pkg_dir(f)
+ oldpkg_parentdir = os.path.dirname(
+ oldpkgdir)
+ shutil.rmtree(oldpkgdir)
+                                        # If the parent directory becomes empty,
+                                        # remove it as well.
+ if not os.listdir(oldpkg_parentdir):
+ shutil.rmtree(oldpkg_parentdir)
+ nm = pkg.manifest.FactoredManifest(nf,
+ xport_cfg.get_pkg_dir(nf),
+ contents="\n".join(
+ line_buffer))
+ except EnvironmentError as e:
+ raise apx._convert_error(e)
+ except Exception as e:
+ abort(_("Creating mogrified "
+ "manifest failed: {0}"
+ ).format(str(e)))
+ else:
+ # Use the original manifest if no
+ # mogrify is done.
+ nm = m
+
+ getb, getf, sendb, sendcb = get_sizes(nm)
if republish:
# For now, normal republication always uses
# uncompressed data as already compressed data
# is not supported for publication.
send_bytes += sendb
+ # Store a mapping between new fmri and new manifest for
+ # future use.
+ fmappings[nf] = nm
+ pkgs_to_get.append(nf)
+
+ get_bytes += getb
+ get_files += getf
+
tracker.manifest_fetch_progress(completion=True)
tracker.manifest_fetch_done()
-
# Next, retrieve and store the content for each package.
tracker.republish_set_goal(len(pkgs_to_get), get_bytes,
send_bytes)
@@ -1306,47 +1551,41 @@
processed = 0
pkgs_to_get = sorted(pkgs_to_get)
- for f in pkgs_to_get:
- tracker.republish_start_pkg(f)
- pkgdir = xport_cfg.get_pkg_dir(f)
+ for nf in pkgs_to_get:
+ tracker.republish_start_pkg(nf)
+ # Processing republish.
+ nm = fmappings[nf]
+ pkgdir = xport_cfg.get_pkg_dir(nf)
mfile = xport.multi_file_ni(src_pub, pkgdir,
not keep_compressed, tracker)
- m = get_manifest(f, xport_cfg)
- add_hashes_to_multi(m, mfile)
-
+ add_hashes_to_multi(nm, mfile)
if mfile:
download_start = True
mfile.wait_files()
if not republish:
# Nothing more to do for this package.
- tracker.republish_end_pkg(f)
+ tracker.republish_end_pkg(nf)
continue
- # Get first line of original manifest so that inclusion
- # of the scheme can be determined.
use_scheme = True
- contents = get_manifest(f, xport_cfg, contents=True)
- if contents.splitlines()[0].find("pkg:/") == -1:
+ # Check whether to include scheme based on new
+ # manifest.
+ if not any(a.name == "set" and str(a).find("pkg:/") >= 0
+ for a in nm.gen_actions()):
use_scheme = False
- pkg_name = f.get_fmri(include_scheme=use_scheme)
- pkgdir = xport_cfg.get_pkg_dir(f)
+ pkg_name = nf.get_fmri(include_scheme=use_scheme)
+ # Use the new fmri for constructing a transaction id.
# This is needed so any previous failures for a package
# can be aborted.
- trans_id = get_basename(f)
-
- if not targ_pub:
- targ_pub = transport.setup_publisher(target,
- src_pub.prefix, dest_xport, dest_xport_cfg,
- remote_prefix=True, ssl_key=dkey,
- ssl_cert=dcert)
-
+ trans_id = get_basename(nf)
try:
t = trans.Transaction(target, pkg_name=pkg_name,
trans_id=trans_id, xport=dest_xport,
- pub=targ_pub, progtrack=tracker)
+ pub=new_targ_pubs[nf.publisher],
+ progtrack=tracker)
# Remove any previous failed attempt to
# to republish this package.
@@ -1357,7 +1596,7 @@
pass
t.open()
- for a in m.gen_actions():
+ for a in nm.gen_actions():
if a.name == "set" and \
a.attrs.get("name", "") in ("fmri",
"pkg.fmri"):
@@ -1369,10 +1608,12 @@
if hasattr(a, "hash"):
fname = os.path.join(pkgdir,
a.hash)
+
a.data = lambda: open(fname,
"rb")
t.add(a)
- if a.name == "signature":
+ if a.name == "signature" and \
+ not do_mog:
# We always store content in the
# repository by the least-
# preferred hash.
@@ -1403,7 +1644,7 @@
misc.makedirs(dest_xport_cfg.incoming_root)
processed += 1
- tracker.republish_end_pkg(f)
+ tracker.republish_end_pkg(nf)
tracker.republish_done()
tracker.reset()
@@ -1499,6 +1740,14 @@
__ret = 99
else:
__ret = 1
+ except pkg.fmri.IllegalFmri as _e:
+ error(_e)
+ try:
+ cleanup()
+ except:
+ __ret = 99
+ else:
+ __ret = 1
except:
traceback.print_exc()
error(misc.get_traceback_message())
--- a/src/tests/cli/t_pkgrecv.py Tue Oct 06 15:15:19 2015 -0700
+++ b/src/tests/cli/t_pkgrecv.py Thu Oct 08 05:30:35 2015 +0000
@@ -30,9 +30,12 @@
import pkg5unittest
import os
+import simplejson as json
+import six
import pkg.catalog as catalog
import pkg.config as cfg
import pkg.client.pkgdefs as pkgdefs
+import pkg.client.transport.transport as transport
import pkg.fmri as fmri
import pkg.manifest as manifest
import pkg.misc as misc
@@ -65,6 +68,36 @@
close
"""
+ filetrans110 = """
+ open pkg:/[email protected],5.11-0
+ add file tmp/bronze1 mode=0444 owner=root group=bin path=/etc/bronze1
+ close
+ """
+
+ filetrans210 = """
+ open pkg:/[email protected],5.11-0
+ add file tmp/bronze1 mode=0444 owner=root group=bin path=/etc/bronze1
+ close
+ """
+
+ filetrans310 = """
+ open pkg:/[email protected],5.11-0
+ add file tmp/bronze1 mode=0444 owner=root group=bin path=/etc/bronze1
+ close
+ """
+
+ filetrans410 = """
+ open pkg:/[email protected],5.11-0
+ add file tmp/bronzeA1 mode=0444 owner=root group=bin path=/etc/bronze1
+ close
+ """
+
+ signature10 = """
+ open pkg:/[email protected],5.11-0
+ add signature tmp/extrafile value=d2ff algorithm=sha256 variant.arch=i386
+ close
+ """
+
tree10 = """
open [email protected],5.11-0
add depend type=require-any [email protected] [email protected]
@@ -132,18 +165,19 @@
misc_files = [ "tmp/bronzeA1", "tmp/bronzeA2", "tmp/bronze1",
"tmp/bronze2", "tmp/copyright2", "tmp/copyright3", "tmp/libc.so.1",
- "tmp/sh"]
+ "tmp/sh", "tmp/extrafile"]
def setUp(self):
- """ Start two depots.
+ """ Start six depots.
depot 1 gets foo and moo, depot 2 gets foo and bar
depot1 is mapped to publisher test1 (preferred)
depot2 is mapped to publisher test1 (alternate)
- depot3 and depot4 are scratch depots"""
+ depot3 and depot4 are scratch depots
+ depot5 and depot6 are for testing mogrify."""
# This test suite needs actual depots.
pkg5unittest.ManyDepotTestCase.setUp(self, ["test1", "test1",
- "test2", "test2"], start_depots=True)
+ "test2", "test2", "test1", "test1"], start_depots=True)
self.make_misc_files(self.misc_files)
@@ -164,9 +198,49 @@
self.durl2 = self.dcs[2].get_depot_url()
self.tempdir = tempfile.mkdtemp(dir=self.test_root)
+ self.mogdir = tempfile.mkdtemp(dir=self.test_root)
+
self.durl3 = self.dcs[3].get_depot_url()
self.durl4 = self.dcs[4].get_depot_url()
+ self.durl5 = self.dcs[5].get_depot_url()
+ self.durl6 = self.dcs[6].get_depot_url()
+ self.dpath5 = self.dcs[5].get_repodir()
+ self.dpath6 = self.dcs[6].get_repodir()
+ self.test_mog = self.pkgsend_bulk(self.durl5,
+ (self.filetrans110, self.filetrans210, self.filetrans310,
+ self.filetrans410, self.signature10))
+
+ self.transforms = {
+ "pub_change": "<transform set name=pkg.fmri -> edit value (pkg://).*?(/.*) \\\\1testpub\\\\2>",
+ "name_change": "<transform set name=pkg.fmri -> edit value (pkg://.*?/).*?(@.*) \\\\1testname\\\\2>",
+ "add_file": "<transform file path=etc/bronze1 -> emit file {0} path=/etc/bronze2 owner=root group=bin mode=0755>".format(os.path.join(self.test_root, self.misc_files[8])),
+ "add_none_file": "<transform file path=etc/bronze1 -> emit file tmp/nonono_such_file path=/etc/bronze2 owner=root group=bin mode=0755>",
+ "drop_file": "<transform file path=etc/bronze1 -> drop>",
+ "file_path_change": "<transform file path=etc/bronze1 -> edit path .* /opt/bronze2>",
+ "add_invalid_act": "<transform file path=etc/bronze1 -> emit invalid_action name=invalid value=invalid>",
+ "add_invalid_act2": "<transform file path=etc/bronze1 -> emit depend name=invalid value=invalid>",
+ "add_invalid_act3": "<transform file path=etc/bronze1 -> emit depend fmri=*$# type=require>",
+ "add_valid_act": "<transform file path=etc/bronze1 -> emit depend [email protected] type=require>"
+ }
+ # Map the transform names to path names
+ xformpaths = dict((
+ (name, os.path.join(self.test_root, "transform_{0}".format(i)))
+ for i, name in enumerate(six.iterkeys(self.transforms))
+ ))
+
+ # Now that we have path names, we can use the expandos in the
+ # transform contents to embed those pathnames, and write the
+ # transform files out.
+ for name, path in six.iteritems(xformpaths):
+ f = open(path, "wb")
+ self.transforms[name] = self.transforms[name].format(**xformpaths)
+ f.write(self.transforms[name])
+ f.close()
+
+ self.transform_contents = self.transforms
+ self.transforms = xformpaths
+
@staticmethod
def get_repo(uri):
parts = urlparse(uri, "file", allow_fragments=0)
@@ -179,6 +253,33 @@
"repository's configuration data is not "
"valid:\n{0}").format(e))
+ def __get_mf_path(self, fmri_str, dc_num, pub=None, repo_path=None):
+ """Given an FMRI, return the path to its manifest in our
+ repository."""
+
+ usepub = "test"
+ if pub:
+ usepub = pub
+ if not repo_path:
+ repo_path = self.dcs[dc_num].get_repodir()
+ path_comps = [repo_path, "publisher",
+ usepub, "pkg"]
+ pfmri = pkg.fmri.PkgFmri(fmri_str)
+ path_comps.append(pfmri.get_name())
+ path_comps.append(pfmri.get_link_path().split("@")[1])
+ return os.path.sep.join(path_comps)
+
+ def __get_manifest_contents(self, fmri_str, dc_num, pub=None,
+ repo_path=None):
+ """Given an FMRI, return the unsorted manifest contents from our
+ repository as a string."""
+
+ mpath = self.__get_mf_path(fmri_str, dc_num, pub=pub,
+ repo_path=repo_path)
+ mf = pkg.manifest.Manifest()
+ mf.set_content(pathname=mpath)
+ return mf.tostr_unsorted()
+
def test_0_opts(self):
"""Verify that various basic options work as expected and that
invalid options or option values return expected exit code."""
@@ -192,6 +293,15 @@
self.pkgrecv(self.durl1, "-d {0} [email protected]".format(
self.tempdir), exit=1)
+ self.pkgrecv(self.durl1, "-d {0} --mog-file fakefile --clone"
+ .format(self.dpath2), exit=2)
+
+ self.pkgrecv(self.durl1, "-d {0} --mog-file fakefile --a [email protected]"
+ .format(self.dpath2), exit=2)
+
+ self.pkgrecv(self.durl1, "-d {0} --mog-file ++ [email protected]"
+ .format(self.dpath2), exit=1)
+
# Test help.
self.pkgrecv(command="-h", exit=0)
@@ -422,6 +532,85 @@
"manifest")
self.assertTrue(os.path.isfile(mpath))
+ # Verify --mog-file option changes all package fmris into new
+ # publisher name. Also verify the manifest pkg.fmri value is
+ # changed correspondingly.
+ npath = tempfile.mkdtemp(dir=self.test_root)
+ self.create_repo(npath)
+ self.pkgrecv(self.durl1, "-r --mog-file {0} -d {1} {2}".format(
+ self.transforms["pub_change"], npath, bronze))
+ self.pkgrepo("verify -s {0}".format(npath))
+ self.pkgrepo("-s {0} list -F json".format(npath))
+ out_json = json.loads(self.output)
+ for elem in out_json:
+ self.assert_(elem["publisher"] == "testpub" and
+ "testpub" in elem["pkg.fmri"])
+ ma = self.__get_manifest_contents(elem["pkg.fmri"], 0,
+ pub="testpub", repo_path=npath)
+ self.assert_("pkg://testpub/" in ma)
+
+ # Verify again with --raw option.
+ self.pkgrecv(self.durl1, "-r --raw --mog-file {0} -d {1} -v "
+ "{2}".format(self.transforms["pub_change"],
+ npath, bronze))
+ xport, xport_cfg = transport.setup_transport()
+ xport_cfg.pkg_root = npath
+ pkgdir = xport_cfg.get_pkg_dir(bronze)
+ with open(os.path.join(pkgdir, "manifest")) as f:
+ mcontent = f.read()
+ self.assert_("testpub" in mcontent)
+ with open(os.path.join(os.path.join(pkgdir, "manifest.set"))) \
+ as f:
+ ms = f.read()
+ self.assert_("testpub" in ms)
+ pkgdir = xport_cfg.get_pkg_dir(amber)
+ with open(os.path.join(pkgdir, "manifest")) as f:
+ mcontent = f.read()
+ self.assert_("testpub" in mcontent)
+ with open(os.path.join(os.path.join(pkgdir, "manifest.set"))) \
+ as f:
+ ms = f.read()
+ self.assert_("testpub" in ms)
+
+ # Verify changing package name works.
+ npath2 = tempfile.mkdtemp(dir=self.test_root)
+ self.pkgrecv(self.durl1, "-r --raw --mog-file {0} -d {1} -v "
+ "{2}".format(self.transforms["name_change"],
+ npath2, bronze))
+ xport, xport_cfg = transport.setup_transport()
+ xport_cfg.pkg_root = npath2
+ bronze = self.published[4]
+ # Assert old path does not exist
+ oldpath = xport_cfg.get_pkg_dir(fmri.PkgFmri(bronze))
+ self.assert_(not os.path.exists(oldpath))
+ self.assert_(not os.path.exists(os.path.dirname(oldpath)))
+ bronze = bronze.replace("bronze", "testname")
+ bronze = fmri.PkgFmri(bronze, None)
+ pkgdir = xport_cfg.get_pkg_dir(bronze)
+ with open(os.path.join(pkgdir, "manifest")) as f:
+ mcontent = f.read()
+ self.assert_("testname" in mcontent)
+ with open(os.path.join(os.path.join(pkgdir, "manifest.set"))) \
+ as f:
+ ms = f.read()
+ self.assert_("testname" in ms)
+
+ amber = self.published[1]
+ # Assert old path does not exist
+ oldpath = xport_cfg.get_pkg_dir(fmri.PkgFmri(amber))
+ self.assert_(not os.path.exists(oldpath))
+ self.assert_(not os.path.exists(os.path.dirname(oldpath)))
+ amber = amber.replace("amber", "testname")
+ amber = fmri.PkgFmri(amber, None)
+ pkgdir = xport_cfg.get_pkg_dir(amber)
+ with open(os.path.join(pkgdir, "manifest")) as f:
+ mcontent = f.read()
+ self.assert_("testname" in mcontent)
+ with open(os.path.join(os.path.join(pkgdir, "manifest.set"))) \
+ as f:
+ ms = f.read()
+ self.assert_("testname" in ms)
+
def test_4_timever(self):
"""Verify that receiving with -m options work as expected."""
@@ -585,6 +774,16 @@
# This would fail before behavior fixed to skip existing pkgs.
self.pkgrecv(self.durl1, "-r -d file://{0} {1}".format(npath, f2))
+ npath = tempfile.mkdtemp(dir=self.test_root)
+ self.pkgsend("file://{0}".format(npath),
+ "create-repository --set-property publisher.prefix=testpub")
+ # Verify by changing publisher name, the second time run also
+ # does not fail with existing package error.
+ self.pkgrecv(self.durl1, "--mog-file {0} -d file://{1} {2}"
+ .format(self.transforms["pub_change"], npath, f))
+ self.pkgrecv(self.durl1, "--mog-file {0} -r -d file://{1} {2}"
+ .format(self.transforms["pub_change"], npath, f2))
+
def test_7_recv_multipublisher(self):
"""Verify that pkgrecv handles multi-publisher repositories as
expected."""
@@ -610,6 +809,23 @@
self.assertNotEqual(self.output.find("test1/amber"), -1)
self.assertNotEqual(self.output.find("test2/amber"), -1)
+ # Test using --mog-file to change publishers of packages from
+ # multiple publishers.
+ npath = tempfile.mkdtemp(dir=self.test_root)
+ self.create_repo(npath)
+ self.pkgrecv(self.durl1, "--mog-file {0} -d {1} [email protected] "
+ "[email protected]".format(self.transforms["pub_change"], npath))
+ self.pkgrepo("verify -s {0} --disable dependency"
+ .format(npath))
+ self.pkgrepo("-s {0} list -F json".format(npath))
+ out_json = json.loads(self.output)
+ for elem in out_json:
+ self.assert_(elem["publisher"] == "testpub" and
+ "testpub" in elem["pkg.fmri"])
+ ma = self.__get_manifest_contents(elem["pkg.fmri"], 0,
+ pub="testpub", repo_path=npath)
+ self.assert_("pkg://testpub/" in ma)
+
# Verify attempting to retrieve a non-existent package fails
# for a multi-publisher repository.
self.pkgrecv(self.durl3, "-d {0} nosuchpackage".format(self.durl4),
@@ -721,6 +937,50 @@
self.pkgrecv(self.durl3, "-n -a -d {0} \*".format(arc_path))
self.assertFalse(os.path.exists(arc_path))
+ #
+ # Verify that packages can be received from an archive to an
+ # archive.
+ #
+ arc_mog_path = os.path.join(self.test_root, "test_mog.p5p")
+ self.pkgrecv(self.durl1, "-a --mog-file {0} --mog-file {1} "
+ "-d {2} bronze@1".format(self.transforms["drop_file"],
+ self.transforms["pub_change"], arc_mog_path))
+
+ # Check for expected publishers.
+ arc = p5p.Archive(arc_mog_path, mode="r")
+ expected = set(["testpub"])
+ pubs = set(p.prefix for p in arc.get_publishers())
+ self.assertEqualDiff(expected, pubs)
+
+ # Check for expected package FMRIs.
+ bronze = self.published[2]
+ bronze = bronze.replace("test1", "testpub")
+ expected = set([bronze])
+ tmpdir = tempfile.mkdtemp(dir=self.test_root)
+ returned = []
+ for pfx in pubs:
+ catdir = os.path.join(tmpdir, pfx)
+ os.mkdir(catdir)
+ for part in ("catalog.attrs", "catalog.base.C"):
+ arc.extract_catalog1(part, catdir, pfx)
+
+ cat = catalog.Catalog(meta_root=catdir, read_only=True)
+ returned.extend(str(f) for f in cat.fmris())
+ fileObjs = []
+ allcontents = ""
+ for idx in arc.get_index():
+ tarf = arc.get_file(idx)
+ if tarf:
+ fileObjs.append(tarf)
+ allcontents += str(tarf.readlines())
+ # 3 files + 1 license + 1 manifest +
+ # 1 repo configuration.
+ self.assert_(len(fileObjs) == 6)
+ # etc/bronze1 has been dropped.
+ self.assert_("etc/bronze1" not in allcontents)
+ self.assertEqualDiff(expected, set(returned))
+ arc.close()
+
def test_9_dryruns(self):
"""Test that the dry run option to pkgrecv works as expected."""
@@ -1031,10 +1291,136 @@
self.assert_(fmri.PkgFmri(s).get_fmri(anarchy=True,
include_scheme=False) in self.output)
+ # Verify mogrify works by retrieving mogrified pkgs from the
+ # new target catalog.
+ self.pkgrecv(self.dpath1, "--mog-file {0} -d {1} -v \*".format(
+ self.transforms["pub_change"], self.tempdir))
+ self.assert_("target catalog 'testpub'" in self.output)
+ self.pkgrepo("verify -s {0}".format(self.tempdir))
+
# Test that output is correct if -n is not specified.
self.pkgrecv(self.dpath1, "-d {0} -v \*".format(self.tempdir))
self.assert_("dry-run" not in self.output)
+ def test_14_mog_manifest(self):
+ """Mogrify some contents in the manifest, and verify
+ republish behaviour."""
+
+ # Test with add an invalid action will fail.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v filetrans1"
+ .format(self.transforms["add_invalid_act"], self.durl6),
+ exit=1)
+ self.pkgrecv(self.durl5, "--raw --mog-file {0} -d {1} "
+ "-v filetrans1".format(self.transforms["add_invalid_act"],
+ self.durl6), exit=1)
+
+ # Test add a depend action without fmri attribute will fail.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v filetrans1"
+ .format(self.transforms["add_invalid_act2"], self.durl6),
+ exit=1)
+ self.pkgrecv(self.durl5, "--raw --mog-file {0} -d {1} "
+ "-v filetrans1".format(self.transforms["add_invalid_act2"],
+ self.durl6), exit=1)
+
+ # Test add a depend action with invalid fmri attribute will
+ # fail.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v filetrans1"
+ .format(self.transforms["add_invalid_act3"], self.durl6),
+ exit=1)
+ self.pkgrecv(self.durl5, "--raw --mog-file {0} -d {1} -v "
+ "filetrans1".format(self.transforms["add_invalid_act3"],
+ self.durl6), exit=1)
+
+ # Test with adding a non-existing file will fail.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v filetrans1"
+ .format(self.transforms["add_none_file"], self.durl6),
+ exit=1)
+ self.pkgrecv(self.durl5, "--raw --mog-file {0} -d {1} -v "
+ "filetrans1".format(self.transforms["add_none_file"],
+ self.durl6), exit=1)
+ self.assert_("not allowed" in self.errout)
+
+ # Test with adding an additional existing file
+ # (hashable content) should fail.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v filetrans1"
+ .format(self.transforms["add_file"], self.durl6), exit=1)
+ self.assert_("not allowed" in self.errout)
+
+ # Test with adding an depend action should succeed.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v filetrans1"
+ .format(self.transforms["add_valid_act"], self.durl6))
+ self.pkgrecv(self.durl5, "--raw --mog-file {0} -d {1} -v "
+ "filetrans1".format(self.transforms["add_valid_act"],
+ self.dpath6))
+ xport, xport_cfg = transport.setup_transport()
+ xport_cfg.pkg_root = self.dpath6
+ pkgdir = xport_cfg.get_pkg_dir(pkg.fmri.PkgFmri(
+ self.test_mog[0]))
+ with open(os.path.join(pkgdir, "manifest")) as f:
+ mcontent = f.read()
+ self.assert_("depend" in mcontent)
+ self.assert_(os.path.exists(os.path.join(pkgdir,
+ "manifest.depend")))
+
+ # Drop the file.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} filetrans2"
+ .format(self.transforms["drop_file"], self.durl6))
+ self.pkgrecv(self.durl5, "-v --raw --mog-file {0} -d {1} "
+ "filetrans4".format(self.transforms["drop_file"],
+ self.dpath6))
+ pkgdir = xport_cfg.get_pkg_dir(pkg.fmri.PkgFmri(
+ self.test_mog[3]))
+ with open(os.path.join(pkgdir, "manifest")) as f:
+ mcontent = f.read()
+ self.assert_("path" not in mcontent)
+ self.assert_(not os.path.exists(os.path.join(pkgdir,
+ "manifest.file")))
+
+ # With -v.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v filetrans2"
+ .format(self.transforms["drop_file"], self.durl6))
+ self.pkgrepo("-s {0} list -F json filetrans2".format(
+ self.dpath6))
+ out_json = json.loads(self.output)
+ elem = out_json[0]
+ ma = self.__get_manifest_contents(elem["pkg.fmri"], 6,
+ pub="test1")
+ self.assert_("etc/bronze1" not in ma)
+
+ # Change file path attribute.
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v filetrans3"
+ .format(self.transforms["file_path_change"], self.durl6))
+ self.pkgrepo("-s {0} list -F json filetrans3".format(
+ self.dpath6))
+ out_json = json.loads(self.output)
+ elem = out_json[0]
+ ma = self.__get_manifest_contents(elem["pkg.fmri"], 6,
+ pub="test1")
+ self.assert_("opt/bronze2" in ma)
+ self.pkgrepo("verify -s {0} --disable dependency"
+ .format(self.dpath6))
+
+ self.pkgrecv(self.durl5, "--raw --mog-file {0} -d {1} -v "
+ "filetrans3".format(self.transforms["file_path_change"],
+ self.dpath6))
+ pkgdir = xport_cfg.get_pkg_dir(pkg.fmri.PkgFmri(
+ self.test_mog[2]))
+ with open(os.path.join(pkgdir, "manifest")) as f:
+ mcontent = f.read()
+ self.assert_("opt/bronze2" in mcontent)
+ self.assert_(os.path.exists(os.path.join(pkgdir,
+ "manifest.file")))
+ with open(os.path.join(pkgdir, "manifest.file")) as f:
+ mf = f.read()
+ self.assert_("opt/bronze2" in mf)
+
+ self.pkgrecv(self.durl5, "--mog-file {0} -d {1} -v signature"
+ .format(self.transforms["pub_change"], self.durl6))
+ ma = self.__get_manifest_contents(elem["pkg.fmri"], 6,
+ pub="test1")
+ self.assert_("signature" not in ma)
+ self.pkgrepo("verify -s {0} --disable dependency"
+ .format(self.dpath6))
class TestPkgrecvHTTPS(pkg5unittest.HTTPSTestClass):
--- a/src/util/publish/pkgmogrify.py Tue Oct 06 15:15:19 2015 -0700
+++ b/src/util/publish/pkgmogrify.py Thu Oct 08 05:30:35 2015 +0000
@@ -27,24 +27,14 @@
import getopt
import gettext
import locale
-import os
-import re
-import shlex
-import six
import sys
import traceback
import warnings
-import pkg.actions
import pkg.misc as misc
+import pkg.mogrify as mog
from pkg.misc import PipeError
-macros = {}
-includes = []
-appends = []
-transforms = []
-printinfo = []
-
def usage(errmsg="", exitcode=2):
"""Emit a usage message and optionally prefix it with a more specific
@@ -59,623 +49,6 @@
[-O outputfile] [-P printfile] [inputfile ...]"""))
sys.exit(exitcode)
-def add_transform(transform, filename, lineno):
- """This routine adds a transform tuple to the list used
- to process actions."""
-
- # strip off transform
- s = transform[10:]
- # make error messages familiar
- transform = "<" + transform + ">"
-
- try:
- index = s.index("->")
- except ValueError:
- raise RuntimeError(_("Missing -> in transform"))
- matching = s[0:index].strip().split()
- types = [a for a in matching if "=" not in a]
- attrdict = pkg.actions.attrsfromstr(" ".join([a for a in matching if "=" in a]))
-
- for a in attrdict:
- try:
- attrdict[a] = re.compile(attrdict[a])
- except re.error as e:
- raise RuntimeError(
- _("transform ({transform}) has regexp error "
- "({err}) in matching clause"
- ).format(transform=transform, err=e))
-
- op = s[index+2:].strip().split(None, 1)
-
- # use closures to encapsulate desired operation
-
- if op[0] == "drop":
- if len(op) > 1:
- raise RuntimeError(
- _("transform ({0}) has 'drop' operation syntax error"
- ).format(transform))
- operation = lambda a, m, p, f, l: None
-
- elif op[0] == "set":
- try:
- attr, value = shlex.split(op[1])
- except ValueError:
- raise RuntimeError(
- _("transform ({0}) has 'set' operation syntax error"
- ).format(transform))
- def set_func(action, matches, pkg_attrs, filename, lineno):
- newattr = substitute_values(attr, action, matches,
- pkg_attrs, filename, lineno)
- newval = substitute_values(value, action, matches,
- pkg_attrs, filename, lineno)
- if newattr == "action.hash":
- if hasattr(action, "hash"):
- action.hash = newval
- else:
- action.attrs[newattr] = newval
- return action
- operation = set_func
-
- elif op[0] == "default":
- try:
- attr, value = shlex.split(op[1])
- except ValueError:
- raise RuntimeError(
- _("transform ({0}) has 'default' operation syntax error"
- ).format(transform))
-
- def default_func(action, matches, pkg_attrs, filename, lineno):
- newattr = substitute_values(attr, action, matches,
- pkg_attrs, filename, lineno)
- if newattr not in action.attrs:
- newval = substitute_values(value, action,
- matches, pkg_attrs, filename, lineno)
- action.attrs[newattr] = newval
- return action
- operation = default_func
-
- elif op[0] == "abort":
- if len(op) > 1:
- raise RuntimeError(_("transform ({0}) has 'abort' "
- "operation syntax error").format(transform))
-
- def abort_func(action, matches, pkg_attrs, filename, lineno):
- sys.exit(0)
-
- operation = abort_func
-
- elif op[0] == "exit":
- exitval = 0
- msg = None
-
- if len(op) == 2:
- args = op[1].split(None, 1)
- try:
- exitval = int(args[0])
- except ValueError:
- raise RuntimeError(_("transform ({0}) has 'exit' "
- "operation syntax error: illegal exit value").format(
- transform))
- if len(args) == 2:
- msg = args[1]
-
- def exit_func(action, matches, pkg_attrs, filename, lineno):
- if msg:
- newmsg = substitute_values(msg, action,
- matches, pkg_attrs, filename, lineno,
- quote=True)
- print(newmsg, file=sys.stderr)
- sys.exit(exitval)
-
- operation = exit_func
-
- elif op[0] == "add":
- try:
- attr, value = shlex.split(op[1])
- except ValueError:
- raise RuntimeError(
- _("transform ({0}) has 'add' operation syntax error"
- ).format(transform))
-
- def add_func(action, matches, pkg_attrs, filename, lineno):
- newattr = substitute_values(attr, action, matches,
- pkg_attrs, filename, lineno)
- newval = substitute_values(value, action, matches,
- pkg_attrs, filename, lineno)
- if newattr in action.attrs:
- av = action.attrs[newattr]
- if isinstance(av, list):
- action.attrs[newattr].append(newval)
- else:
- action.attrs[newattr] = [ av, newval ]
- else:
- action.attrs[newattr] = newval
- return action
- operation = add_func
-
- elif op[0] == "edit":
- if len(op) < 2:
- raise RuntimeError(
- _("transform ({0}) has 'edit' operation syntax error"
- ).format(transform))
-
- args = shlex.split(op[1])
- if len(args) not in [2, 3]:
- raise RuntimeError(
- _("transform ({0}) has 'edit' operation syntax error"
- ).format(transform))
- attr = args[0]
-
- # Run args[1] (the regexp) through substitute_values() with a
- # bunch of bogus values to see whether it triggers certain
- # exceptions. If it does, then substitution would have
- # occurred, and we can't compile the regex now, but wait until
- # we can correctly run substitute_values().
- try:
- substitute_values(args[1], None, [], None, None, None)
- regexp = re.compile(args[1])
- except (AttributeError, RuntimeError):
- regexp = args[1]
- except re.error as e:
- raise RuntimeError(
- _("transform ({transform}) has 'edit' operation "
- "with malformed regexp ({err})").format(
- transform=transform, err=e))
-
- if len(args) == 3:
- replace = args[2]
- else:
- replace = ""
-
- def replace_func(action, matches, pkg_attrs, filename, lineno):
- newattr = substitute_values(attr, action, matches,
- pkg_attrs, filename, lineno)
- newrep = substitute_values(replace, action, matches,
- pkg_attrs, filename, lineno)
- val = attrval_as_list(action.attrs, newattr)
-
- if not val:
- return action
-
- # It's now appropriate to compile the regexp, if there
- # are substitutions to be made. So do the substitution
- # and compile the result.
- if isinstance(regexp, six.string_types):
- rx = re.compile(substitute_values(regexp,
- action, matches, pkg_attrs, filename, lineno))
- else:
- rx = regexp
-
- try:
- action.attrs[newattr] = [
- rx.sub(newrep, v)
- for v in val
- ]
- except re.error as e:
- raise RuntimeError(
- _("transform ({transform}) has edit "
- "operation with replacement string regexp "
- "error {err}").format(
- transform=transform, err=e))
- return action
-
- operation = replace_func
-
- elif op[0] == "delete":
- if len(op) < 2:
- raise RuntimeError(
- _("transform ({0}) has 'delete' operation syntax error"
- ).format(transform))
-
- args = shlex.split(op[1])
- if len(args) != 2:
- raise RuntimeError(
- _("transform ({0}) has 'delete' operation syntax error"
- ).format(transform))
- attr = args[0]
-
- try:
- regexp = re.compile(args[1])
- except re.error as e:
- raise RuntimeError(
- _("transform ({transform}) has 'delete' operation"
- "with malformed regexp ({err})").format(
- transform=transform, err=e))
-
- def delete_func(action, matches, pkg_attrs, filename, lineno):
- val = attrval_as_list(action.attrs, attr)
- if not val:
- return action
- try:
- new_val = [
- v
- for v in val
- if not regexp.search(v)
- ]
-
- if new_val:
- action.attrs[attr] = new_val
- else:
- del action.attrs[attr]
- except re.error as e:
- raise RuntimeError(
- _("transform ({transform}) has delete "
- "operation with replacement string regexp "
- "error {err}").format(
- transform=transform, err=e))
- return action
-
- operation = delete_func
-
- elif op[0] == "print":
- if len(op) > 2:
- raise RuntimeError(_("transform ({0}) has 'print' "
- "operation syntax error").format(transform))
-
- if len(op) == 1:
- msg = ""
- else:
- msg = op[1]
-
- def print_func(action, matches, pkg_attrs, filename, lineno):
- newmsg = substitute_values(msg, action, matches,
- pkg_attrs, filename, lineno, quote=True)
-
- printinfo.append("{0}".format(newmsg))
- return action
-
- operation = print_func
-
- elif op[0] == "emit":
- if len(op) > 2:
- raise RuntimeError(_("transform ({0}) has 'emit' "
- "operation syntax error").format(transform))
-
- if len(op) == 1:
- msg = ""
- else:
- msg = op[1]
-
- def emit_func(action, matches, pkg_attrs, filename, lineno):
- newmsg = substitute_values(msg, action, matches,
- pkg_attrs, filename, lineno, quote=True)
-
- if not newmsg.strip() or newmsg.strip()[0] == "#":
- return (newmsg, action)
- try:
- return (pkg.actions.fromstr(newmsg), action)
- except (pkg.actions.MalformedActionError,
- pkg.actions.UnknownActionError,
- pkg.actions.InvalidActionError) as e:
- raise RuntimeError(e)
-
- operation = emit_func
-
- else:
- raise RuntimeError(_("unknown transform operation '{0}'").format(op[0]))
-
- transforms.append((types, attrdict, operation, filename, lineno, transform))
-
-def substitute_values(msg, action, matches, pkg_attrs, filename=None, lineno=None, quote=False):
- """Substitute tokens in messages which can be expanded to the action's
- attribute values."""
-
- newmsg = ""
- prevend = 0
- for i in re.finditer("%\((.+?)\)|%\{(.+?)\}", msg):
- m = i.string[slice(*i.span())]
- assert m[1] in "({"
- if m[1] == "(":
- group = 1
- elif m[1] == "{":
- group = 2
- d = {}
- if ";" in i.group(group):
- attrname, args = i.group(group).split(";", 1)
- tokstream = shlex.shlex(args)
- for tok in tokstream:
- if tok == ";":
- tok = tokstream.get_token()
- eq = tokstream.get_token()
- if eq == "" or eq == ";":
- val = True
- else:
- assert(eq == "=")
- val = tokstream.get_token()
- if ('"', '"') == (val[0], val[-1]):
- val = val[1:-1]
- elif ("'", "'") == (val[0], val[-1]):
- val = val[1:-1]
- d[tok] = val
- else:
- attrname = i.group(group)
-
- d.setdefault("quote", quote)
-
- if d.get("noquote", None):
- d["quote"] = False
-
- if group == 2:
- attr = pkg_attrs.get(attrname, d.get("notfound", None))
- if attr and len(attr) == 1:
- attr = attr[0]
- else:
- if attrname == "pkg.manifest.lineno":
- attr = str(lineno)
- elif attrname == "pkg.manifest.filename":
- attr = str(filename)
- elif attrname == "action.hash":
- attr = getattr(action, "hash",
- d.get("notfound", None))
- elif attrname == "action.key":
- attr = action.attrs.get(action.key_attr,
- d.get("notfound", None))
- elif attrname == "action.name":
- attr = action.name
- else:
- attr = action.attrs.get(attrname,
- d.get("notfound", None))
-
- if attr is None:
- raise RuntimeError(_("attribute '{0}' not found").format(
- attrname))
-
- def q(s):
- if " " in s or "'" in s or "\"" in s or s == "":
- if "\"" not in s:
- return '"{0}"'.format(s)
- elif "'" not in s:
- return "'{0}'".format(s)
- else:
- return '"{0}"'.format(s.replace("\"", "\\\""))
- else:
- return s
-
- if not d["quote"]:
- q = lambda x: x
-
- if isinstance(attr, six.string_types):
- newmsg += msg[prevend:i.start()] + \
- d.get("prefix", "") + q(attr) + d.get("suffix", "")
- else:
- newmsg += msg[prevend:i.start()] + \
- d.get("sep", " ").join([
- d.get("prefix", "") + q(v) + d.get("suffix", "")
- for v in attr
- ])
- prevend = i.end()
-
- newmsg += msg[prevend:]
-
- # Now see if there are any backreferences to match groups
- msg = newmsg
- newmsg = ""
- prevend = 0
- backrefs = sum((
- group
- for group in (
- match.groups()
- for match in matches
- if match.groups()
- )
- ), (None,))
- for i in re.finditer(r"%<\d>", msg):
- ref = int(i.string[slice(*i.span())][2:-1])
-
- if ref == 0 or ref > len(backrefs) - 1:
- raise RuntimeError(_("no match group {group:d} "
- "(max {maxgroups:d})").format(
- group=ref, maxgroups=len(backrefs) - 1))
- if backrefs[ref] is None:
- raise RuntimeError(_("Error\nInvalid backreference: "
- "%<{ref}> refers to an unmatched string"
- ).format(ref=ref))
- newmsg += msg[prevend:i.start()] + backrefs[ref]
- prevend = i.end()
-
- newmsg += msg[prevend:]
- return newmsg
-
-def attrval_as_list(attrdict, key):
- """Return specified attribute as list;
- an empty list if no such attribute exists"""
- if key not in attrdict:
- return []
- val = attrdict[key]
- if not isinstance(val, list):
- val = [val]
- return val
-
-class PkgAction(pkg.actions.generic.Action):
- name = "pkg"
- def __init__(self, attrs):
- self.attrs = attrs
-
-def apply_transforms(action, pkg_attrs, verbose, act_filename, act_lineno):
- """Apply all transforms to action, returning modified action
- or None if action is dropped"""
- comments = []
- newactions = []
- if verbose:
- comments.append("# Action: {0}".format(action))
- for types, attrdict, operation, filename, lineno, transform in transforms:
- if action is None:
- action = PkgAction(pkg_attrs)
- # skip if types are specified and none match
- if types and action.name not in types:
- continue
- # skip if some attrs don't exist
- if set(attrdict.keys()) - set(action.attrs.keys()):
- continue
-
- # Check to make sure all matching attrs actually match. The
- # order is effectively arbitrary, since they come from a dict.
- matches = [
- attrdict[key].match(attrval)
- for key in attrdict
- for attrval in attrval_as_list(action.attrs, key)
- ]
-
- if not all(matches):
- continue
-
- s = transform[11:transform.index("->")]
- # Map each pattern to its position in the original match string.
- matchorder = {}
- for attr, match in six.iteritems(attrdict):
- # Attributes might be quoted even if they don't need it,
- # and lead to a mis-match. These three patterns are all
- # safe to try. If we fail to find the match expression,
- # it's probably because it used different quoting rules
- # than the action code does, or from these three rules.
- # It might very well be okay, so we go ahead, but these
- # oddly quoted patterns will sort at the beginning, and
- # backref matching may be off.
- matchorder[match.pattern] = -1
- for qs in ("{0}={1}", "{0}=\"{1}\"", "{0}='{1}'"):
- pos = s.find(qs.format(attr, match.pattern))
- if pos != -1:
- matchorder[match.pattern] = pos
- break
-
- # Then sort the matches list by those positions.
- matches.sort(key=lambda x: matchorder[x.re.pattern])
-
- # time to apply transform operation
- try:
- if verbose:
- orig_attrs = action.attrs.copy()
- action = operation(action, matches, pkg_attrs,
- act_filename, act_lineno)
- except RuntimeError as e:
- raise RuntimeError("Transform specified in file {0}, line {1} reports {2}".format(
- filename, lineno, e))
- if isinstance(action, tuple):
- newactions.append(action[0])
- action = action[1]
- if verbose:
- if not action or \
- not isinstance(action, six.string_types) and \
- orig_attrs != action.attrs:
- comments.append("# Applied: {0} (file {1} line {2})".format(
- transform, filename, lineno))
- comments.append("# Result: {0}".format(action))
- if not action or isinstance(action, six.string_types):
- break
-
- # Any newly-created actions need to have the transforms applied, too.
- newnewactions = []
- for act in newactions:
- if not isinstance(act, six.string_types):
- c, al = apply_transforms(act, pkg_attrs, verbose,
- act_filename, act_lineno)
- if c:
- comments.append(c)
- newnewactions += [a for a in al if a is not None]
- else:
- newnewactions.append(act)
-
- if len(comments) == 1:
- comments = []
-
- if action and action.name != "pkg":
- return (comments, [action] + newnewactions)
- else:
- return (comments, [None] + newnewactions)
-
-
-def searching_open(filename, try_cwd=False):
- """ implement include hierarchy """
-
- if filename.startswith("/") or try_cwd == True and \
- os.path.exists(filename):
- try:
- return filename, open(filename)
- except IOError as e:
- raise RuntimeError(_("Cannot open file: {0}").format(e))
-
- for i in includes:
- f = os.path.join(i, filename)
- if os.path.exists(f):
- try:
- return f, open(f)
- except IOError as e:
- raise RuntimeError(_("Cannot open file: {0}").format(e))
-
- raise RuntimeError(_("File not found: \'{0}\'").format(filename))
-
-def apply_macros(s):
- """Apply macro subs defined on command line... keep applying
- macros until no translations are found."""
- while s and "$(" in s:
- for key in macros.keys():
- if key in s:
- value = macros[key]
- s = s.replace(key, value)
- break # look for more substitutions
- else:
- break # no more substitutable tokens
- return s
-
-def read_file(tp, ignoreincludes):
- """ return the lines in the file as a list of
- tuples containing (line, filename, line number);
- handle continuation and <include "path">"""
- ret = []
- filename, f = tp
-
- accumulate = ""
- for lineno, line in enumerate(f):
- lineno = lineno + 1 # number from 1
- line = line.strip()
- if not line: # preserve blanks
- ret.append((line, filename, lineno))
- continue
- if line.endswith("\\"):
- accumulate += line[0:-1]
- continue
- elif accumulate:
- line = accumulate + line
- accumulate = ""
-
- if line:
- line = apply_macros(line)
-
- line = line.strip()
-
- if not line:
- continue
-
- try:
- if line.startswith("<") and line.endswith(">"):
- if line.startswith("<include"):
- if not ignoreincludes:
- line = line[1:-1]
- line = line[7:].strip()
- line = line.strip('"')
- ret.extend(read_file(
- searching_open(line),
- ignoreincludes))
- else:
- ret.append((line, filename, lineno))
- elif line.startswith("<transform"):
- line = line[1:-1]
- add_transform(line, filename, lineno)
- else:
- raise RuntimeError(_("unknown command {0}").format(
- line))
- else:
- ret.append((line, filename, lineno))
- except RuntimeError as e:
- error(_("File {file}, line {line:d}: {exception}").format(
- file=filename,
- line=lineno,
- exception=e),
- exitcode=None)
- raise RuntimeError("<included from>")
-
- return ret
-
def error(text, exitcode=1):
"""Emit an error message prefixed by the command name """
@@ -691,6 +64,10 @@
printfilename = None
verbose = False
ignoreincludes = False
+ includes = []
+ macros = {}
+ printinfo = []
+ output = []
try:
opts, pargs = getopt.getopt(sys.argv[1:], "ivD:I:O:P:?", ["help"])
@@ -719,65 +96,11 @@
usage(_("illegal global option -- {0}").format(e.opt))
try:
- if pargs:
- infiles = [ searching_open(f, try_cwd=True) for f in pargs ]
- else:
- infiles = [("<stdin>", sys.stdin)]
-
- lines = []
- except RuntimeError as e:
- error(_("Error processing input arguments: {0}").format(e))
- try:
- for f in infiles:
- lines.extend(read_file(f, ignoreincludes))
- lines.append((None, f[0], None))
+ mog.process_mog(pargs, ignoreincludes, verbose, includes,
+ macros, printinfo, output, error_cb=error)
except RuntimeError as e:
sys.exit(1)
- output = []
-
- pkg_attrs = {}
- for line, filename, lineno in lines:
- if line is None:
- if "pkg.fmri" in pkg_attrs:
- comment, a = apply_transforms(None, pkg_attrs,
- verbose, filename, lineno)
- output.append((comment, a, None))
- pkg_attrs = {}
- continue
-
- if not line or line.startswith("#") or line.startswith("<"):
- output.append(([line], [], None))
- continue
-
- if line.startswith("$("): #prepended unexpanded macro
- # doesn't handle nested macros
- eom = line.index(")") + 1
- prepended_macro = line[0:eom]
- line = line[eom:]
- else:
- prepended_macro = None
-
- try:
- act = pkg.actions.fromstr(line)
- except (pkg.actions.MalformedActionError,
- pkg.actions.UnknownActionError,
- pkg.actions.InvalidActionError) as e:
- error("File {0} line {1:d}: {2}".format(filename, lineno, e))
- try:
- if act.name == "set":
- name = act.attrs["name"]
- value = act.attrs["value"]
- if isinstance(value, six.string_types):
- pkg_attrs.setdefault(name, []).append(value)
- else:
- pkg_attrs.setdefault(name, []).extend(value)
- comment, a = apply_transforms(act, pkg_attrs, verbose,
- filename, lineno)
- output.append((comment, a, prepended_macro))
- except RuntimeError as e:
- error("File {0} line {1:d}: {2}".format(filename, lineno, e))
-
try:
if printfilename == None:
printfile = sys.stdout