Merge "Add netaddr requirements for running ipv4|ipv6 filters" into feature/zuulv3
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
index 1937cd5..70e999e 100644
--- a/etc/status/public_html/jquery.zuul.js
+++ b/etc/status/public_html/jquery.zuul.js
@@ -569,6 +569,11 @@
// Toggle showing/hiding the patchset when the header is
// clicked.
+ if (e.target.nodeName.toLowerCase() === 'a') {
+            // Ignore clicks on the Gerrit patchset link
+ return;
+ }
+
// Grab the patchset panel
var $panel = $(e.target).parents('.zuul-change');
var $body = $panel.children('.zuul-patchset-body');
@@ -652,7 +657,7 @@
setTimeout(function() {app.schedule(app);}, 5000);
return;
}
- app.update().complete(function () {
+ app.update().always(function () {
setTimeout(function() {app.schedule(app);}, 5000);
});
@@ -722,7 +727,7 @@
.removeClass('zuul-msg-wrap-off')
.show();
})
- .complete(function () {
+ .always(function () {
xhr = undefined;
app.emit('update-end');
});
diff --git a/playbooks/zuul-migrate.yaml b/playbooks/zuul-migrate.yaml
index 66c7bd5..c789501 100644
--- a/playbooks/zuul-migrate.yaml
+++ b/playbooks/zuul-migrate.yaml
@@ -9,10 +9,18 @@
args:
chdir: src/git.openstack.org/openstack-infra/project-config
- - name: Collect generated files
+ - name: Collect generated job config
synchronize:
dest: "{{ zuul.executor.log_root }}"
mode: pull
src: "src/git.openstack.org/openstack-infra/project-config/zuul.d"
verify_host: true
no_log: true
+
+ - name: Collect generated playbooks
+ synchronize:
+ dest: "{{ zuul.executor.log_root }}"
+ mode: pull
+ src: "src/git.openstack.org/openstack-infra/project-config/playbooks"
+ verify_host: true
+ no_log: true
diff --git a/zuul/cmd/migrate.py b/zuul/cmd/migrate.py
index fd53987..a1d7111 100644
--- a/zuul/cmd/migrate.py
+++ b/zuul/cmd/migrate.py
@@ -34,9 +34,12 @@
import jenkins_jobs.builder
from jenkins_jobs.formatter import deep_format
import jenkins_jobs.formatter
+from jenkins_jobs.parser import matches
import jenkins_jobs.parser
import yaml
+JOBS_BY_ORIG_TEMPLATE = {} # type: ignore
+SUFFIXES = [] # type: ignore
DESCRIPTION = """Migrate zuul v2 and Jenkins Job Builder to Zuul v3.
This program takes a zuul v2 layout.yaml and a collection of Jenkins Job
@@ -44,6 +47,8 @@
optional mapping config can be given that defines how to map old jobs
to new jobs.
"""
+
+
def project_representer(dumper, data):
return dumper.represent_mapping('tag:yaml.org,2002:map',
data.items())
@@ -109,6 +114,7 @@
def ordered_load(stream, *args, **kwargs):
return yaml.load(stream=stream, *args, **kwargs)
+
def ordered_dump(data, stream=None, *args, **kwargs):
dumper = IndentedDumper
# We need to do this because of how template expansion into a project
@@ -118,6 +124,7 @@
return yaml.dump(data, stream=stream, default_flow_style=False,
Dumper=dumper, width=80, *args, **kwargs)
+
def get_single_key(var):
if isinstance(var, str):
return var
@@ -134,7 +141,7 @@
dict_keys = list(var.keys())
if len(dict_keys) != 1:
return False
- if var[get_single_key(from_dict)]:
+ if var[get_single_key(var)]:
return False
return True
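The fix above swaps an undefined `from_dict` for the dict under test. A minimal reconstruction of the two helpers, with the lines the hunk elides filled in as an assumption (the second function's name is cut off, so `has_single_key` is a guess):

    def get_single_key(var):
        if isinstance(var, str):
            return var
        elif isinstance(var, list):
            return var[0]
        return list(var.keys())[0]

    def has_single_key(var):  # name assumed; the hunk elides it
        if isinstance(var, list):
            return False
        if isinstance(var, str):
            return True
        dict_keys = list(var.keys())
        if len(dict_keys) != 1:
            return False
        if var[get_single_key(var)]:
            return False
        return True

    has_single_key('gate-foo')               # True: a bare string
    has_single_key({'gate-foo': None})       # True: single key, empty value
    has_single_key({'gate-foo': {'nv': 1}})  # False: the key carries content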
@@ -214,9 +221,16 @@
params[key] = template[key]
params['template-name'] = template_name
+ project_name = params['name']
+ params['name'] = '$ZUUL_SHORT_PROJECT_NAME'
expanded = deep_format(template, params, allow_empty_variables)
job_name = expanded.get('name')
+ templated_job_name = job_name
+ if job_name:
+ job_name = job_name.replace(
+ '$ZUUL_SHORT_PROJECT_NAME', project_name)
+ expanded['name'] = job_name
if jobs_glob and not matches(job_name, jobs_glob):
continue
@@ -224,9 +238,10 @@
expanded['orig_template'] = orig_template
expanded['template_name'] = template_name
self.jobs.append(expanded)
+ JOBS_BY_ORIG_TEMPLATE[templated_job_name] = expanded
-
-jenkins_jobs.parser.YamlParser.expandYamlForTemplateJob = expandYamlForTemplateJob
+jenkins_jobs.parser.YamlParser.expandYamlForTemplateJob = \
+ expandYamlForTemplateJob
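A worked example of the `$ZUUL_SHORT_PROJECT_NAME` round-trip above (template and project names are illustrative):

    # template 'gate-{name}-docs' expanded for project 'nova':
    #   params['name'] = '$ZUUL_SHORT_PROJECT_NAME'
    #   deep_format() -> 'gate-$ZUUL_SHORT_PROJECT_NAME-docs'  # templated_job_name
    #   .replace()    -> 'gate-nova-docs'                      # job_name, stored
    # JOBS_BY_ORIG_TEMPLATE is keyed on the templated form, which is exactly
    # what Job.addJJBJob rebuilds from a '{name}'-style orig further down.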
class JJB(jenkins_jobs.builder.Builder):
@@ -283,6 +298,8 @@
class Job:
+ log = logging.getLogger("zuul.Migrate")
+
def __init__(self,
orig: str,
name: str=None,
@@ -301,25 +318,28 @@
self.parent = parent
self.branch = None
self.files = None
+ self.jjb_job = None
+ self.emit = True
if self.content and not self.name:
self.name = get_single_key(content)
if not self.name:
self.name = self.orig
self.name = self.name.replace('-{name}', '').replace('{name}-', '')
- if self.orig.endswith('-nv'):
- self.voting = False
- if self.name.endswith('-nv'):
- # NOTE(mordred) This MIGHT not be safe - it's possible, although
- # silly, for someone to have -nv and normal versions of the same
- # job in the same pipeline. Let's deal with that if we find it
- # though.
- self.name = self.name.replace('-nv', '')
+
+ for suffix in SUFFIXES:
+ suffix = '-{suffix}'.format(suffix=suffix)
+
+ if self.name.endswith(suffix):
+ self.name = self.name.replace(suffix, '')
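A sketch of the configurable suffix stripping above, assuming a mapping file with `strip-suffixes: [nv, dsvm]` (values illustrative):

    # SUFFIXES = ['nv', 'dsvm']          # loaded from mapping.yaml below
    # 'gate-foo-nv'       -> 'gate-foo'
    # 'gate-tempest-dsvm' -> 'gate-tempest'
    # Caveat: str.replace() removes the token wherever it occurs, so a name
    # like 'gate-nv-foo-nv' would collapse to 'gate-foo'.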
def _stripNodeName(self, node):
node_key = '-{node}'.format(node=node)
self.name = self.name.replace(node_key, '')
+ def setNoEmit(self):
+ self.emit = False
+
def setVars(self, vars):
self.vars = vars
@@ -350,7 +370,252 @@
def getNodes(self):
return self.nodes
- def toDict(self):
+ def addJJBJob(self, jobs):
+ if '{name}' in self.orig:
+ self.jjb_job = JOBS_BY_ORIG_TEMPLATE[self.orig.format(
+ name='$ZUUL_SHORT_PROJECT_NAME')]
+ else:
+ self.jjb_job = jobs[self.orig]
+
+    def getTimeout(self):
+        # Convert a Jenkins build timeout (minutes) to seconds; return
+        # None when the job carries no timeout wrapper.
+        timeout = None
+        if self.jjb_job:
+            for wrapper in self.jjb_job.get('wrappers', []):
+                if isinstance(wrapper, dict):
+                    build_timeout = wrapper.get('timeout')
+                    if isinstance(build_timeout, dict):
+                        timeout = build_timeout.get('timeout')
+                        if timeout is not None:
+                            timeout = int(timeout) * 60
+        return timeout
+
+ @property
+ def short_name(self):
+ return self.name.replace('legacy-', '')
+
+ @property
+ def job_path(self):
+ return 'playbooks/legacy/{name}'.format(name=self.short_name)
+
+ def _getRsyncOptions(self, source):
+ # If the source starts with ** then we want to match any
+ # number of directories, so don't anchor the include filter.
+ # If it does not start with **, then the intent is likely to
+ # at least start by matching an immediate file or subdirectory
+ # (even if later we have a ** in the middle), so in this case,
+ # anchor it to the root of the transfer (the workspace).
+ if not source.startswith('**'):
+ source = os.path.join('/', source)
+ # These options mean: include the thing we want, include any
+ # directories (so that we continue to search for the thing we
+ # want no matter how deep it is), exclude anything that
+ # doesn't match the thing we want or is a directory, then get
+ # rid of empty directories left over at the end.
+ rsync_opts = ['--include="%s"' % source,
+ '--include="*/"',
+ '--exclude="*"',
+ '--prune-empty-dirs']
+ return rsync_opts
+
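Two illustrative inputs for the filter logic just described:

    # Anchored: no leading '**', so the include is rooted at the workspace.
    #   _getRsyncOptions('logs/testr_results.html') ->
    #       ['--include="/logs/testr_results.html"', '--include="*/"',
    #        '--exclude="*"', '--prune-empty-dirs']
    # Unanchored: a leading '**' may match at any depth.
    #   _getRsyncOptions('**/*.log') ->
    #       ['--include="**/*.log"', '--include="*/"',
    #        '--exclude="*"', '--prune-empty-dirs']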
+ def _makeSCPTask(self, publisher):
+ tasks = []
+ artifacts = False
+ site = publisher['scp']['site']
+ for scpfile in publisher['scp']['files']:
+ if 'ZUUL_PROJECT' in scpfile.get('source', ''):
+ self.log.error(
+ "Job {name} uses ZUUL_PROJECT in source".format(
+ name=self.name))
+ continue
+
+ if scpfile.get('copy-console'):
+ continue
+ else:
+ rsync_opts = self._getRsyncOptions(scpfile['source'])
+
+ target = scpfile['target']
+ # TODO(mordred) Generalize this next section, it's SUPER
+ # openstack specific. We can likely do this in mapping.yaml
+            if site == 'static.openstack.org':
+                # A bare 'continue' here would only advance the inner loop
+                # over prefixes; flag the match and skip the whole scpfile.
+                unsupported = False
+                for f in ('service-types', 'specs', 'docs-draft'):
+                    if target.startswith(f):
+                        self.log.error(
+                            "Job {name} uses {f} publishing".format(
+                                name=self.name, f=f))
+                        unsupported = True
+                        break
+                if unsupported:
+                    continue
+ target = target.replace(
+ 'logs/$LOG_PATH', "{{ zuul.executor.work_root }}")
+ elif site == 'tarballs.openstack.org':
+ if not target.startswith('tarballs'):
+ self.log.error(
+ 'Job {name} wants to publish artifacts to non'
+ ' tarballs dir'.format(name=self.name))
+ continue
+ if target.startswith('tarballs/ci'):
+ target = target.split('/', 3)[-1]
+ else:
+ target = target.split('/', 2)[-1]
+ target = "{{ zuul.executor.work_root }}/artifacts/" + target
+ artifacts = True
+            elif site == 'yaml2ical':
+                self.log.error(
+                    'Job {name} uses yaml2ical publisher'.format(
+                        name=self.name))
+                continue
+
+ syncargs = collections.OrderedDict(
+ src="{{ ansible_user_dir }}",
+ dest=target,
+ copy_links='yes',
+ verify_host=True,
+ mode='pull')
+ if rsync_opts:
+ syncargs['rsync_opts'] = rsync_opts
+ task = collections.OrderedDict(
+ name='copy files from node',
+ synchronize=syncargs,
+ no_log=True)
+ # We don't use retry_args here because there is a bug in
+ # the synchronize module that breaks subsequent attempts at
+ # retrying. Better to try once and get an accurate error
+ # message if it fails.
+ # https://github.com/ansible/ansible/issues/18281
+ tasks.append(task)
+
+ if artifacts:
+ ensure_task = collections.OrderedDict()
+ ensure_task['name'] = 'Ensure artifacts directory exists'
+ ensure_task['file'] = collections.OrderedDict(
+ path="{{ zuul.executor.work_root }}/artifacts",
+ state='directory')
+ ensure_task['delegate_to'] = 'localhost'
+ tasks.insert(0, ensure_task)
+ return tasks, artifacts
+
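For reference, one scpfile emerges from the loop above as roughly this Ansible task (dest and rsync_opts values illustrative):

    # - name: copy files from node
    #   synchronize:
    #     src: '{{ ansible_user_dir }}'
    #     dest: '{{ zuul.executor.work_root }}/artifacts/releases'
    #     copy_links: 'yes'
    #     verify_host: true
    #     mode: pull
    #     rsync_opts: ['--include="/dist/*.tar.gz"', '--include="*/"',
    #                  '--exclude="*"', '--prune-empty-dirs']
    #   no_log: true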
+ def _makeBuilderTask(self, playbook_dir, builder, sequence):
+ script_fn = '%s-%02d.sh' % (self.short_name, sequence)
+ script_path = os.path.join(playbook_dir, script_fn)
+ # Don't write a script to echo the template line
+ if builder['shell'].startswith('echo JJB template: '):
+ return
+ with open(script_path, 'w') as script:
+ data = builder['shell']
+ if not data.startswith('#!'):
+                data = '#!/bin/bash -x\n%s' % (data,)
+ script.write(data)
+
+ task = collections.OrderedDict()
+ task['name'] = 'Builder script {seq} translated from {old}'.format(
+ seq=sequence, old=self.orig)
+ task['script'] = script_fn
+ task['environment'] = (
+ '{{ host_vars[inventory_hostname] | zuul_legacy_vars }}')
+ return task
+
+ def _transformPublishers(self, jjb_job):
+ early_publishers = []
+ late_publishers = []
+ old_publishers = jjb_job.get('publishers', [])
+ for publisher in old_publishers:
+ early_scpfiles = []
+ late_scpfiles = []
+ if 'scp' not in publisher:
+ early_publishers.append(publisher)
+ continue
+ copy_console = False
+ for scpfile in publisher['scp']['files']:
+ if scpfile.get('copy-console'):
+ scpfile['keep-hierarchy'] = True
+ late_scpfiles.append(scpfile)
+ copy_console = True
+ else:
+ early_scpfiles.append(scpfile)
+ publisher['scp']['files'] = early_scpfiles + late_scpfiles
+ if copy_console:
+ late_publishers.append(publisher)
+ else:
+ early_publishers.append(publisher)
+ publishers = early_publishers + late_publishers
+ if old_publishers != publishers:
+ self.log.debug("Transformed job publishers")
+ return early_publishers, late_publishers
+
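The split above defers any publisher whose scpfiles include `copy-console` until after every other publisher, presumably so the console copy also captures their output. Sketched on a hypothetical publisher list:

    # before: [scp(copy-console), scp(logs), mail]
    # after:  early = [scp(logs), mail]
    #         late  = [scp(copy-console, keep-hierarchy=True)]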
+ def emitPlaybooks(self, jobsdir):
+ has_artifacts = False
+ if not self.jjb_job:
+ if self.emit:
+ self.log.error(
+ 'Job {name} has no job content'.format(name=self.name))
+ return False, False
+
+ playbook_dir = os.path.join(jobsdir, self.job_path)
+ if not os.path.exists(playbook_dir):
+ os.makedirs(playbook_dir)
+
+        # Write the playbooks under the output dir; the job config still
+        # refers to them by their repo-relative job_path.
+        run_playbook = os.path.join(playbook_dir, 'run.yaml')
+        post_playbook = os.path.join(playbook_dir, 'post.yaml')
+ tasks = []
+ sequence = 0
+ tasks.append(collections.OrderedDict(
+ debug=collections.OrderedDict(
+ msg='Autoconverted job {name} from old job {old}'.format(
+ name=self.name, old=self.orig))))
+ for builder in self.jjb_job.get('builders', []):
+ if 'shell' in builder:
+ task = self._makeBuilderTask(playbook_dir, builder, sequence)
+ if task:
+ sequence += 1
+ tasks.append(task)
+ play = collections.OrderedDict(
+ hosts='all',
+ tasks=tasks)
+ with open(run_playbook, 'w') as run_playbook_out:
+ ordered_dump([play], run_playbook_out)
+
+ has_post = False
+ tasks = []
+ early_publishers, late_publishers = self._transformPublishers(
+ self.jjb_job)
+ for publishers in [early_publishers, late_publishers]:
+ for publisher in publishers:
+ if 'scp' in publisher:
+ task, artifacts = self._makeSCPTask(publisher)
+ if artifacts:
+ has_artifacts = True
+ tasks.extend(task)
+                if 'afs' in publisher:
+                    self.log.error(
+                        "Job {name} uses AFS publisher".format(name=self.name))
+ if tasks:
+ has_post = True
+ play = collections.OrderedDict()
+ play['hosts'] = 'all'
+ play['tasks'] = tasks
+ with open(post_playbook, 'w') as post_playbook_out:
+ ordered_dump([play], post_playbook_out)
+ return has_artifacts, has_post
+
+ def toJobDict(self, has_artifacts=True, has_post=True):
+ output = collections.OrderedDict()
+ output['name'] = self.name
+ if has_artifacts:
+ output['parent'] = 'publish-openstack-artifacts'
+ output['run'] = os.path.join(self.job_path, 'run.yaml')
+ if has_post:
+ output['post-run'] = os.path.join(self.job_path, 'post.yaml')
+
+        if self.vars:
+            output['vars'] = self.vars.copy()
+        timeout = self.getTimeout()
+        if timeout:
+            output['timeout'] = timeout
+            # A job may have a timeout but no vars of its own.
+            output.setdefault('vars', {})['BUILD_TIMEOUT'] = str(
+                timeout * 1000)
+
+ if self.nodes:
+ output['nodes'] = self.getNodes()
+
+ if self.required_projects:
+ output['required-projects'] = self.required_projects
+
+ return output
+
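Put together, a converted job comes out of toJobDict roughly like this (names and values illustrative):

    # - job:
    #     name: legacy-foo-docs
    #     parent: publish-openstack-artifacts   # only when has_artifacts
    #     run: playbooks/legacy/foo-docs/run.yaml
    #     post-run: playbooks/legacy/foo-docs/post.yaml
    #     timeout: 1800                         # seconds
    #     vars:
    #       BUILD_TIMEOUT: '1800000'            # same value in milliseconds
    #     nodes: [ubuntu-xenial]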
+ def toPipelineDict(self):
if self.content:
output = self.content
else:
@@ -363,9 +628,6 @@
if not self.voting:
output[self.name].setdefault('voting', False)
- if self.nodes:
- output[self.name].setdefault('nodes', self.getNodes())
-
if self.required_projects:
output[self.name].setdefault(
'required-projects', self.required_projects)
@@ -395,6 +657,8 @@
self.labels = []
self.job_mapping = []
self.template_mapping = {}
+ self.jjb_jobs = {}
+ self.seen_new_jobs = []
nodepool_data = ordered_load(open(nodepool_config, 'r'))
for label in nodepool_data['labels']:
self.labels.append(label['name'])
@@ -403,6 +667,8 @@
else:
mapping_data = ordered_load(open(mapping_file, 'r'))
self.default_node = mapping_data['default-node']
+ global SUFFIXES
+ SUFFIXES = mapping_data.get('strip-suffixes', [])
for map_info in mapping_data.get('job-mapping', []):
if map_info['old'].startswith('^'):
map_info['pattern'] = re.compile(map_info['old'])
@@ -419,6 +685,9 @@
def hasProjectTemplate(self, old_name):
return old_name in self.template_mapping
+ def setJJBJobs(self, jjb_jobs):
+ self.jjb_jobs = jjb_jobs
+
def getNewTemplateName(self, old_name):
return self.template_mapping.get(old_name, old_name)
@@ -457,14 +726,21 @@
def getNewJob(self, job_name, remove_gate):
if job_name in self.job_direct:
if isinstance(self.job_direct[job_name], dict):
- return Job(job_name, content=self.job_direct[job_name])
+ job = Job(job_name, content=self.job_direct[job_name])
else:
- return Job(job_name, name=self.job_direct[job_name])
+ job = Job(job_name, name=self.job_direct[job_name])
+            if job.name not in self.seen_new_jobs:
+                # Record the resolved v3 name; the raw mapping value may be
+                # a dict, which would poison the membership checks later.
+                self.seen_new_jobs.append(job.name)
+            job.setNoEmit()
+            return job
new_job = None
for map_info in self.job_mapping:
new_job = self.mapNewJob(job_name, map_info)
if new_job:
+                if new_job.name not in self.seen_new_jobs:
+                    self.seen_new_jobs.append(new_job.name)
+                new_job.setNoEmit()
break
if not new_job:
orig_name = job_name
@@ -488,8 +764,10 @@
if layout_job.get('files'):
new_job.files = layout_job['files']
+ new_job.addJJBJob(self.jjb_jobs)
return new_job
+
class ChangeQueue:
def __init__(self):
self.name = ''
@@ -516,7 +794,7 @@
for job in self._jobs:
if job.queue_name:
if (self.assigned_name and
- job.queue_name != self.assigned_name):
+ job.queue_name != self.assigned_name):
raise Exception("More than one name assigned to "
"change queue: %s != %s" %
(self.assigned_name,
@@ -528,6 +806,7 @@
for project in other.projects:
self.addProject(project)
+
class ZuulMigrate:
log = logging.getLogger("zuul.Migrate")
@@ -563,6 +842,7 @@
unseen.discard(job['name'])
for name in unseen:
del self.jobs[name]
+ self.mapping.setJJBJobs(self.jobs)
def getOldJob(self, name):
if name not in self.old_jobs:
@@ -607,7 +887,8 @@
continue
if 'gate' not in pt['name']:
continue
- gate_jobs |= set(self.flattenOldJobs(pt['gate'], project['name']))
+ gate_jobs |= set(self.flattenOldJobs(pt['gate'],
+ project['name']))
gate_jobs |= set(self.flattenOldJobs(project['gate']))
old_project = OldProject(project['name'], gate_jobs)
change_queue = ChangeQueue()
@@ -653,14 +934,15 @@
zuul_yaml = os.path.join(self.outdir, 'zuul.yaml')
zuul_d = os.path.join(self.outdir, 'zuul.d')
orig = os.path.join(zuul_d, '01zuul.yaml')
- outfile = os.path.join(zuul_d, '99converted.yaml')
+ job_outfile = os.path.join(zuul_d, '99converted-jobs.yaml')
+ project_outfile = os.path.join(zuul_d, '99converted-projects.yaml')
if not os.path.exists(self.outdir):
os.makedirs(self.outdir)
if not os.path.exists(zuul_d):
os.makedirs(zuul_d)
if os.path.exists(zuul_yaml) and self.move:
os.rename(zuul_yaml, orig)
- return outfile
+ return job_outfile, project_outfile
def makeNewJobs(self, old_job, parent: Job=None):
self.log.debug("makeNewJobs(%s)", old_job)
@@ -682,7 +964,7 @@
elif isinstance(old_job, dict):
parent_name = get_single_key(old_job)
- parent = Job(orig=parent_name, parent=parent)
+ parent = self.makeNewJobs(parent_name, parent=parent)[0]
jobs = self.makeNewJobs(old_job[parent_name], parent=parent)
for job in jobs:
@@ -702,7 +984,7 @@
# job name to new job name when expanding templates into projects.
tmp = [job for job in self.makeNewJobs(value)]
self.job_objects.extend(tmp)
- jobs = [job.toDict() for job in tmp]
+ jobs = [job.toPipelineDict() for job in tmp]
new_template[key] = dict(jobs=jobs)
return new_template
@@ -814,15 +1096,16 @@
if k in ('template', 'name'):
continue
processPipeline(project[k]['jobs'], job_name_regex, files)
-
+
for matcher in matchers:
# find the project-specific section
for skipper in matcher.get('skip-if', []):
if skipper.get('project'):
if re.search(skipper['project'], project['name']):
- if 'all-files-match-any' in skipper:
- applyIrrelevantFiles(matcher['name'],
- skipper['all-files-match-any'])
+ if 'all-files-match-any' in skipper:
+ applyIrrelevantFiles(
+ matcher['name'],
+ skipper['all-files-match-any'])
def writeProject(self, project):
'''
@@ -868,7 +1151,7 @@
new_project[key]['queue'] = queue.name
tmp = [job for job in self.makeNewJobs(value)]
self.job_objects.extend(tmp)
- jobs = [job.toDict() for job in tmp]
+ jobs = [job.toPipelineDict() for job in tmp]
new_project[key]['jobs'] = jobs
for name in templates_to_expand:
@@ -882,23 +1165,38 @@
return new_project
def writeJobs(self):
- outfile = self.setupDir()
- config = []
+ job_outfile, project_outfile = self.setupDir()
+ job_config = []
+ project_config = []
for template in self.layout.get('project-templates', []):
self.log.debug("Processing template: %s", template)
+ new_template = self.writeProjectTemplate(template)
+ self.new_templates[new_template['name']] = new_template
if not self.mapping.hasProjectTemplate(template['name']):
- new_template = self.writeProjectTemplate(template)
- self.new_templates[new_template['name']] = new_template
- config.append({'project-template': new_template})
+ job_config.append({'project-template': new_template})
for project in self.layout.get('projects', []):
- config.append(
+ project_config.append(
{'project': self.writeProject(project)})
- with open(outfile, 'w') as yamlout:
+ seen_jobs = []
+ for job in sorted(self.job_objects, key=lambda job: job.name):
+ if (job.name not in seen_jobs and
+ job.name not in self.mapping.seen_new_jobs and
+ job.emit):
+ has_artifacts, has_post = job.emitPlaybooks(self.outdir)
+ job_config.append({'job': job.toJobDict(
+ has_artifacts, has_post)})
+ seen_jobs.append(job.name)
+
+ with open(job_outfile, 'w') as yamlout:
# Insert an extra space between top-level list items
- yamlout.write(ordered_dump(config).replace('\n-', '\n\n-'))
+ yamlout.write(ordered_dump(job_config).replace('\n-', '\n\n-'))
+
+ with open(project_outfile, 'w') as yamlout:
+ # Insert an extra space between top-level list items
+ yamlout.write(ordered_dump(project_config).replace('\n-', '\n\n-'))
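The `.replace('\n-', '\n\n-')` post-pass relies on top-level YAML list items starting at column zero, so only they follow a bare newline:

    # ordered_dump(...) == '- job:\n    name: a\n- job:\n    name: b\n'
    # after replace     == '- job:\n    name: a\n\n- job:\n    name: b\n'
    # Nested sequence items are indented, so their '-' never directly
    # follows '\n' and is left untouched.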
def main():