Skip to content

Commit

Permalink
Bug 1349980 - update task generation to support -g in try syntax for …
Browse files Browse the repository at this point in the history
…running OS X tasks in generic-worker, r=dustin
  • Loading branch information
petemoore committed Apr 6, 2017
1 parent 665322c commit 7edd039
Show file tree
Hide file tree
Showing 7 changed files with 197 additions and 59 deletions.
1 change: 1 addition & 0 deletions AUTHORS
Original file line number Diff line number Diff line change
Expand Up @@ -797,6 +797,7 @@ Pete Collins <petejc@collab.net>
Peter Annema <disttsc@bart.nl>
Peter Bajusz <hyp-x@inf.bme.hu>
Peter Lubczynski <peterl@netscape.com>
Peter Moore <petemoore@gmx.net>
Peter Naulls
Peter Parente <parente@cs.unc.edu>
Peter Seliger
Expand Down
22 changes: 16 additions & 6 deletions taskcluster/taskgraph/transforms/job/mozharness.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@

from __future__ import absolute_import, print_function, unicode_literals

from textwrap import dedent

from taskgraph.util.schema import Schema
from voluptuous import Required, Optional, Any

Expand Down Expand Up @@ -168,7 +170,7 @@ def mozharness_on_docker_worker_setup(config, job, taskdesc):

# We use the generic worker to run tasks on Windows
@run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
def mozharness_on_windows(config, job, taskdesc):
def mozharness_on_generic_worker(config, job, taskdesc):
run = job['run']

# fail if invalid run options are included
Expand All @@ -187,7 +189,7 @@ def mozharness_on_windows(config, job, taskdesc):
worker = taskdesc['worker']

worker['artifacts'] = [{
'path': r'public\build',
'path': r'public/build',
'type': 'directory',
}]

Expand All @@ -199,6 +201,11 @@ def mozharness_on_windows(config, job, taskdesc):
'MOZ_SCM_LEVEL': config.params['level'],
})

if not job['attributes']['build_platform'].startswith('win'):
raise Exception(
"Task generation for mozharness build jobs currently only supported on Windows"
)

mh_command = [r'c:\mozilla-build\python\python.exe']
mh_command.append('\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]))
for cfg in run['config']:
Expand All @@ -218,12 +225,15 @@ def mozharness_on_windows(config, job, taskdesc):
hg_command.append('.\\build\\src')

worker['command'] = []
# sccache currently uses the full compiler commandline as input to the
# cache hash key, so create a symlink to the task dir and build from
# the symlink dir to get consistent paths.
if taskdesc.get('needs-sccache'):
worker['command'].extend([
r'if exist z:\build rmdir z:\build',
# Make the comment part of the first command, as it will help users to
# understand what is going on, and why these steps are implemented.
dedent('''\
:: sccache currently uses the full compiler commandline as input to the
:: cache hash key, so create a symlink to the task dir and build from
:: the symlink dir to get consistent paths.
if exist z:\\build rmdir z:\\build'''),
r'mklink /d z:\build %cd%',
# Grant delete permission on the link to everyone.
r'icacls z:\build /grant *S-1-1-0:D /L',
Expand Down
113 changes: 83 additions & 30 deletions taskcluster/taskgraph/transforms/job/mozharness_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,56 +169,63 @@ def mozharness_test_on_docker(config, job, taskdesc):


@run_job_using('generic-worker', 'mozharness-test', schema=mozharness_test_run_schema)
def mozharness_test_on_windows(config, job, taskdesc):
def mozharness_test_on_generic_worker(config, job, taskdesc):
test = taskdesc['run']['test']
mozharness = test['mozharness']
worker = taskdesc['worker']

artifacts = [
{
'path': 'public\\logs\\localconfig.json',
'name': 'public/logs/localconfig.json',
'path': 'logs/localconfig.json',
'type': 'file'
},
{
'path': 'public\\logs\\log_critical.log',
'name': 'public/logs/log_critical.log',
'path': 'logs/log_critical.log',
'type': 'file'
},
{
'path': 'public\\logs\\log_error.log',
'name': 'public/logs/log_error.log',
'path': 'logs/log_error.log',
'type': 'file'
},
{
'path': 'public\\logs\\log_fatal.log',
'name': 'public/logs/log_fatal.log',
'path': 'logs/log_fatal.log',
'type': 'file'
},
{
'path': 'public\\logs\\log_info.log',
'name': 'public/logs/log_info.log',
'path': 'logs/log_info.log',
'type': 'file'
},
{
'path': 'public\\logs\\log_raw.log',
'name': 'public/logs/log_raw.log',
'path': 'logs/log_raw.log',
'type': 'file'
},
{
'path': 'public\\logs\\log_warning.log',
'name': 'public/logs/log_warning.log',
'path': 'logs/log_warning.log',
'type': 'file'
},
{
'path': 'public\\test_info',
'name': 'public/test_info',
'path': 'build/blobber_upload_dir',
'type': 'directory'
}
]

build_platform = taskdesc['attributes']['build_platform']

target = 'firefox-{}.en-US.{}'.format(get_firefox_version(), build_platform)
target = 'firefox-{}.en-US.{}'.format(get_firefox_version(), build_platform) \
if build_platform.startswith('win') else 'target'

installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])

installer_url = get_artifact_url(
'<build>', 'public/build/{}.zip'.format(target))
test_packages_url = get_artifact_url(
'<build>', 'public/build/{}.test_packages.json'.format(target))
mozharness_url = get_artifact_url(
'<build>', 'public/build/mozharness.zip')

taskdesc['scopes'].extend(
['generic-worker:os-group:{}'.format(group) for group in test['os-groups']])
Expand All @@ -228,14 +235,48 @@ def mozharness_test_on_windows(config, job, taskdesc):
worker['max-run-time'] = test['max-run-time']
worker['artifacts'] = artifacts

# assemble the command line
mh_command = [
'c:\\mozilla-build\\python\\python.exe',
'-u',
'mozharness\\scripts\\' + normpath(mozharness['script'])
]
# this list will get cleaned up / reduced / removed in bug 1354088
if build_platform.startswith('macosx'):
worker['env'] = {
'IDLEIZER_DISABLE_SHUTDOWN': 'true',
'LANG': 'en_US.UTF-8',
'LC_ALL': 'en_US.UTF-8',
'MOZ_HIDE_RESULTS_TABLE': '1',
'MOZ_NODE_PATH': '/usr/local/bin/node',
'MOZ_NO_REMOTE': '1',
'NO_EM_RESTART': '1',
'NO_FAIL_ON_TEST_ERRORS': '1',
'PATH': '/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin',
'SHELL': '/bin/bash',
'XPCOM_DEBUG_BREAK': 'warn',
'XPC_FLAGS': '0x0',
'XPC_SERVICE_NAME': '0'
}

if build_platform.startswith('macosx'):
mh_command = [
'python2.7',
'-u',
'mozharness/scripts/' + mozharness['script']
]
elif build_platform.startswith('win'):
mh_command = [
'c:\\mozilla-build\\python\\python.exe',
'-u',
'mozharness\\scripts\\' + normpath(mozharness['script'])
]
else:
mh_command = [
'python',
'-u',
'mozharness/scripts/' + mozharness['script']
]

for mh_config in mozharness['config']:
mh_command.extend(['--cfg', 'mozharness\\configs\\' + normpath(mh_config)])
cfg_path = 'mozharness/configs/' + mh_config
if build_platform.startswith('win'):
cfg_path = normpath(cfg_path)
mh_command.extend(['--cfg', cfg_path])
mh_command.extend(mozharness.get('extra-options', []))
if mozharness.get('no-read-buildbot-config'):
mh_command.append('--no-read-buildbot-config')
Expand All @@ -259,16 +300,28 @@ def mozharness_test_on_windows(config, job, taskdesc):
if isinstance(c, basestring) and c.startswith('--test-suite'):
mh_command[i] += suffix

# bug 1311966 - symlink to artifacts until generic worker supports virtual artifact paths
artifact_link_commands = ['mklink /d %cd%\\public\\test_info %cd%\\build\\blobber_upload_dir']
for link in [a['path'] for a in artifacts if a['path'].startswith('public\\logs\\')]:
artifact_link_commands.append('mklink %cd%\\{} %cd%\\{}'.format(link, link[7:]))
worker['mounts'] = [{
'directory': '.',
'content': {
'artifact': 'public/build/mozharness.zip',
'task-id': {
'task-reference': '<build>'
}
},
'format': 'zip'
}]

worker['command'] = artifact_link_commands + [
{'task-reference': 'c:\\mozilla-build\\wget\\wget.exe {}'.format(mozharness_url)},
'c:\\mozilla-build\\info-zip\\unzip.exe mozharness.zip',
{'task-reference': ' '.join(mh_command)}
]
if build_platform.startswith('win'):
worker['command'] = [
{'task-reference': ' '.join(mh_command)}
]
else:
mh_command_task_ref = []
for token in mh_command:
mh_command_task_ref.append({'task-reference': token})
worker['command'] = [
mh_command_task_ref
]


@run_job_using('native-engine', 'mozharness-test', schema=mozharness_test_run_schema)
Expand Down
2 changes: 1 addition & 1 deletion taskcluster/taskgraph/transforms/job/toolchain.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ def windows_toolchain(config, job, taskdesc):
svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(config.params['level'])
worker['mounts'] = [{
'cache-name': svn_cache,
'path': r'llvm-sources',
'directory': r'llvm-sources',
}]
taskdesc['scopes'].extend([
'generic-worker:cache:' + svn_cache,
Expand Down
94 changes: 77 additions & 17 deletions taskcluster/taskgraph/transforms/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@

import json
import time
from copy import deepcopy

from taskgraph.util.treeherder import split_symbol
from taskgraph.transforms.base import TransformSequence
Expand Down Expand Up @@ -212,28 +213,68 @@
Optional('retry-exit-status'): int,

}, {
# see http://schemas.taskcluster.net/generic-worker/v1/payload.json
# and https://docs.taskcluster.net/reference/workers/generic-worker/payload
Required('implementation'): 'generic-worker',

# command is a list of commands to run, sequentially
Required('command'): [taskref_or_string],
# on Windows, each command is a string, on OS X and Linux, each command is
# a string array
Required('command'): Any(
[taskref_or_string], # Windows
[[taskref_or_string]] # Linux / OS X
),

# artifacts to extract from the task image after completion; note that artifacts
# for the generic worker cannot have names
Optional('artifacts'): [{
# type of artifact -- simple file, or recursive directory
'type': Any('file', 'directory'),

# task image path from which to read artifact
# filesystem path from which to read artifact
'path': basestring,

# if not specified, path is used for artifact name
Optional('name'): basestring
}],

# directories and/or files to be mounted
# Directories and/or files to be mounted.
# The actual allowed combinations are stricter than the model below,
# but this provides a simple starting point.
# See https://docs.taskcluster.net/reference/workers/generic-worker/payload
Optional('mounts'): [{
# a unique name for the cache volume
'cache-name': basestring,
# A unique name for the cache volume, implies writable cache directory
# (otherwise mount is a read-only file or directory).
Optional('cache-name'): basestring,
# Optional content for pre-loading cache, or mandatory content for
# read-only file or directory. Pre-loaded content can come from either
# a task artifact or from a URL.
Optional('content'): {

# *** Either (artifact and task-id) or url must be specified. ***

# Artifact name that contains the content.
Optional('artifact'): basestring,
# Task ID that has the artifact that contains the content.
Optional('task-id'): taskref_or_string,
# URL that supplies the content in response to an unauthenticated
# GET request.
Optional('url'): basestring
},

# task image path for the cache
'path': basestring,
# *** Either file or directory must be specified. ***

# If mounting a cache or read-only directory, the filesystem location of
# the directory should be specified as a relative path to the task
# directory here.
Optional('directory'): basestring,
# If mounting a file, specify the relative path within the task
# directory to mount the file (the file will be read only).
Optional('file'): basestring,
# Required if and only if `content` is specified and mounting a
# directory (not a file). This should be the archive format of the
# content (either pre-loaded cache or read-only directory).
Optional('format'): Any('rar', 'tar.bz2', 'tar.gz', 'zip')
}],

# environment variables
Expand All @@ -244,6 +285,9 @@

# os user groups for test task workers
Optional('os-groups', default=[]): [basestring],

# optional features
Required('chain-of-trust', default=False): bool,
}, {
Required('implementation'): 'buildbot-bridge',

Expand Down Expand Up @@ -557,19 +601,26 @@ def build_generic_worker_payload(config, task, task_def):
artifacts = []

for artifact in worker['artifacts']:
artifacts.append({
a = {
'path': artifact['path'],
'type': artifact['type'],
'expires': task_def['expires'], # always expire with the task
})

mounts = []

for mount in worker.get('mounts', []):
mounts.append({
'cacheName': mount['cache-name'],
'directory': mount['path']
})
}
if 'name' in artifact:
a['name'] = artifact['name']
artifacts.append(a)

# Need to copy over mounts, but rename keys to respect naming convention
# * 'cache-name' -> 'cacheName'
# * 'task-id' -> 'taskId'
# All other key names are already suitable, and don't need renaming.
mounts = deepcopy(worker.get('mounts', []))
for mount in mounts:
if 'cache-name' in mount:
mount['cacheName'] = mount.pop('cache-name')
if 'content' in mount:
if 'task-id' in mount['content']:
mount['content']['taskId'] = mount['content'].pop('task-id')

task_def['payload'] = {
'command': worker['command'],
Expand All @@ -585,6 +636,15 @@ def build_generic_worker_payload(config, task, task_def):
if 'retry-exit-status' in worker:
raise Exception("retry-exit-status not supported in generic-worker")

# currently only support one feature (chain of trust) but this will likely grow
features = {}

if worker.get('chain-of-trust'):
features['chainOfTrust'] = True

if features:
task_def['payload']['features'] = features


@payload_builder('scriptworker-signing')
def build_scriptworker_signing_payload(config, task, task_def):
Expand Down
Loading

0 comments on commit 7edd039

Please sign in to comment.