Factor out a base class from trigger_multiple_dimensions.py.

This will be used by the perf team to intercept trigger calls and
trigger on specific bot IDs.
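
An illustrative trigger_script entry, mirroring the waterfall JSON updated in
this change (the bot ids and pool shown are the chromium.perf.fyi ones):

  "trigger_script": {
    "args": [
      "--multiple-trigger-configs",
      "[{\"id\": \"swarm846-c4\", \"pool\": \"Chrome-perf-fyi\"}, ...]"
    ],
    "script": "//testing/trigger_scripts/perf_device_trigger.py"
  }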

Bug: 758630
Cq-Include-Trybots: master.tryserver.chromium.android:android_optional_gpu_tests_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel
Change-Id: Ib5152748169ccd50d01dccc1f61979bcb967a481
Reviewed-on: https://chromium-review.googlesource.com/896365
Reviewed-by: Emily Hanley <[email protected]>
Reviewed-by: Ashley Enstad <[email protected]>
Reviewed-by: Kenneth Russell <[email protected]>
Commit-Queue: Emily Hanley <[email protected]>
Cr-Commit-Position: refs/heads/master@{#535001}
diff --git a/testing/buildbot/chromium.perf.fyi.json b/testing/buildbot/chromium.perf.fyi.json
index b24daa1..c4ccecd 100644
--- a/testing/buildbot/chromium.perf.fyi.json
+++ b/testing/buildbot/chromium.perf.fyi.json
@@ -553,14 +553,15 @@
           "expiration": 36000,
           "hard_timeout": 10800,
           "ignore_task_failure": false,
-          "io_timeout": 3600
+          "io_timeout": 3600,
+          "shards": 2
         },
         "trigger_script": {
           "args": [
-            "--bot-id=swarm846-c4",
-            "--bot-id=swarm847-c4"
+            "--multiple-trigger-configs",
+            "[{\"id\": \"swarm846-c4\", \"pool\": \"Chrome-perf-fyi\"}, {\"id\": \"swarm847-c4\", \"pool\": \"Chrome-perf-fyi\"}]"
           ],
-          "script": "//tools/perf/perf_device_trigger.py"
+          "script": "//testing/trigger_scripts/perf_device_trigger.py"
         }
       }
     ]
diff --git a/testing/scripts/run_performance_tests.py b/testing/scripts/run_performance_tests.py
index b2db0962..d90e379 100755
--- a/testing/scripts/run_performance_tests.py
+++ b/testing/scripts/run_performance_tests.py
@@ -70,7 +70,6 @@
       '--isolated-script-test-filter', type=str, required=False)
   parser.add_argument('--xvfb', help='Start xvfb.', action='store_true')
   parser.add_argument('--output-format', action='append')
-  parser.add_argument('--builder', required=True)
   parser.add_argument('--bot', required=True,
                       help='Bot ID to use to determine which tests to run. Will'
                            ' use //tools/perf/core/benchmark_sharding_map.json'
@@ -84,7 +83,7 @@
 
   with open(sharding_map_path()) as f:
     sharding_map = json.load(f)
-  sharding = sharding_map[args.builder][args.bot]['benchmarks']
+  sharding = sharding_map[args.bot]['benchmarks']
   return_code = 0
 
   for benchmark in sharding:
diff --git a/testing/trigger_scripts/base_test_triggerer.py b/testing/trigger_scripts/base_test_triggerer.py
new file mode 100755
index 0000000..98f19e0
--- /dev/null
+++ b/testing/trigger_scripts/base_test_triggerer.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Custom swarming base trigger class.
+
+This base class consolidates custom swarming triggering logic, to allow one bot
+to conceptually span multiple Swarming configurations, while lumping all trigger
+calls under one logical step. It also gives subclasses the ability to
+define their own logic for pruning the set of configurations and for
+selecting which configurations to trigger jobs on.
+
+See trigger_multiple_dimensions.py for an example of how to use this base class.
+
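+A minimal subclass sketch (illustrative only; the class name is hypothetical,
+the overridden hook is real):
+
+  class FixedOrderTriggerer(BaseTestTriggerer):
+    def select_config_indices(self, args, verbose):
+      # Trigger shard i on bot config i; assumes at least args.shards configs.
+      return range(args.shards)
+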
+"""
+
+import argparse
+import copy
+import json
+import os
+import random
+import subprocess
+import sys
+import tempfile
+import urllib
+
+
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(
+  __file__))))
+
+SWARMING_PY = os.path.join(SRC_DIR, 'tools', 'swarming_client', 'swarming.py')
+
+
+def strip_unicode(obj):
+  """Recursively re-encodes strings as utf-8 inside |obj|. Returns the result.
+  """
+  if isinstance(obj, unicode):
+    return obj.encode('utf-8', 'replace')
+
+  if isinstance(obj, list):
+    return list(map(strip_unicode, obj))
+
+  if isinstance(obj, dict):
+    new_obj = type(obj)(
+        (strip_unicode(k), strip_unicode(v)) for k, v in obj.iteritems() )
+    return new_obj
+
+  return obj
+
+
+class BaseTestTriggerer(object):
+  def __init__(self):
+    self._bot_configs = None
+    self._bot_statuses = []
+    self._total_bots = 0
+
+
+  def modify_args(self, all_args, bot_index, shard_index, total_shards,
+                  temp_file):
+    """Modifies the given argument list.
+
+    Specifically, it does the following:
+      * Adds a --dump-json argument, to read in the results of the
+        individual trigger command.
+      * Adds the dimensions associated with the bot config at the given index.
+      * If the number of shards is greater than one, adds --env
+        arguments to set the GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS
+        environment variables to _shard_index_ and _total_shards_,
+        respectively.
+
+    The arguments are structured like this:
+    <args to swarming.py trigger> -- <args to bot running isolate>
+    This means we have to add arguments to specific locations in the argument
+    list, to either affect the trigger command, or what the bot runs.
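+
+    For example (illustrative values), given the bot config {'id': 'build1'},
+    shard_index=0 and total_shards=2, the base class turns
+      ['trigger', '--', 'benchmark']
+    into
+      ['trigger', '--dump-json', <temp_file>, '--env', 'GTEST_SHARD_INDEX',
+       '0', '--env', 'GTEST_TOTAL_SHARDS', '2', '--dimension', 'id', 'build1',
+       '--', 'benchmark']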
+
+    """
+    assert '--' in all_args, (
+        'Malformed trigger command; -- argument expected but not found')
+    dash_ind = all_args.index('--')
+    bot_args = ['--dump-json', temp_file]
+    if total_shards > 1:
+      bot_args.append('--env')
+      bot_args.append('GTEST_SHARD_INDEX')
+      bot_args.append(str(shard_index))
+      bot_args.append('--env')
+      bot_args.append('GTEST_TOTAL_SHARDS')
+      bot_args.append(str(total_shards))
+    for key, val in sorted(self._bot_configs[bot_index].iteritems()):
+      bot_args.append('--dimension')
+      bot_args.append(key)
+      bot_args.append(val)
+    return self.append_additional_args(
+        all_args[:dash_ind] + bot_args + all_args[dash_ind:])
+
+  def append_additional_args(self, args):
+    """ Gives subclasses ability to append additional args if necessary
+
+    Base class just returns given get."""
+    return args
+
+  def parse_bot_configs(self, args):
+    try:
+      self._bot_configs = strip_unicode(json.loads(
+        args.multiple_trigger_configs))
+    except Exception as e:
+      raise ValueError('Error while parsing JSON from bot config string %s: %s'
+                       % (args.multiple_trigger_configs, str(e)))
+    # Validate the input.
+    if not isinstance(self._bot_configs, list):
+      raise ValueError('Bot configurations must be a list, were: %s' %
+                       args.multiple_trigger_configs)
+    if len(self._bot_configs) < 1:
+      raise ValueError('Bot configuration list must have at least one entry')
+    if not all(isinstance(entry, dict) for entry in self._bot_configs):
+      raise ValueError('Bot configurations must all be dictionaries')
+
+  def query_swarming_for_bot_configs(self, verbose):
+    # Query Swarming to figure out which bots are available.
+    for config in self._bot_configs:
+      values = []
+      for key, value in sorted(config.iteritems()):
+        values.append(('dimensions', '%s:%s' % (key, value)))
+      # Ignore dead and quarantined bots.
+      values.append(('is_dead', 'FALSE'))
+      values.append(('quarantined', 'FALSE'))
+      query_arg = urllib.urlencode(values)
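+      # The query string looks like, e.g. (illustrative values):
+      #   dimensions=id%3Abuild1&dimensions=pool%3AChrome-perf-fyi
+      #   &is_dead=FALSE&quarantined=FALSE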
+
+      temp_file = self.make_temp_file(prefix='base_trigger_dimensions',
+                                      suffix='.json')
+      try:
+        ret = self.run_swarming(['query',
+                                 '-S',
+                                 'chromium-swarm.appspot.com',
+                                 '--limit',
+                                 '0',
+                                 '--json',
+                                 temp_file,
+                                 ('bots/count?%s' % query_arg)],
+                                verbose)
+        if ret:
+          raise Exception('Error running swarming.py')
+        with open(temp_file) as fp:
+          query_result = strip_unicode(json.load(fp))
+        # Summarize number of available bots per configuration.
+        count = int(query_result['count'])
+        # Be robust against errors in computation.
+        available = max(0, count - int(query_result['busy']))
+        self._bot_statuses.append({'total': count, 'available': available})
+        if verbose:
+          idx = len(self._bot_statuses) - 1
+          print 'Bot config %d: %s' % (idx, str(self._bot_statuses[idx]))
+      finally:
+        self.delete_temp_file(temp_file)
+    # Sum up the total count of all bots.
+    self._total_bots = sum(x['total'] for x in self._bot_statuses)
+    if verbose:
+      print 'Total bots: %d' % (self._total_bots)
+
+  def remove_swarming_dimension(self, args, dimension):
+    for i in xrange(len(args)):
+      if args[i] == '--dimension' and args[i+1] == dimension:
+        return args[:i] + args[i+3:]
+    return args
+
+  def make_temp_file(self, prefix=None, suffix=None):
+    # This trick of closing the file handle is needed on Windows in order to
+    # make the file writeable.
+    h, temp_file = tempfile.mkstemp(prefix=prefix, suffix=suffix)
+    os.close(h)
+    return temp_file
+
+  def delete_temp_file(self, temp_file):
+    os.remove(temp_file)
+
+  def read_json_from_temp_file(self, temp_file):
+    with open(temp_file) as f:
+      return json.load(f)
+
+  def write_json_to_file(self, merged_json, output_file):
+    with open(output_file, 'w') as f:
+      json.dump(merged_json, f)
+
+  def run_swarming(self, args, verbose):
+    if verbose:
+      print 'Running Swarming with args:'
+      print str(args)
+    return subprocess.call([sys.executable, SWARMING_PY] + args)
+
+  def prune_test_specific_configs(self, args, verbose):
+    # Hook for subclasses to further prune the configs to
+    # run tests on.
+    pass
+
+  def select_config_indices(self, args, verbose):
+    # Hook for subclasses to determine which configs from
+    # self._bot_configs to trigger jobs on.
+    # Returns a list of indices into self._bot_configs whose
+    # length equals args.shards.
+    pass
+
+  def trigger_tasks(self, args, remaining):
+    """Triggers tasks for each bot.
+
+    Args:
+      args: Parsed arguments which we need to use.
+      remaining: The remainder of the arguments, which should be passed to
+                 swarming.py calls.
+
+    Returns:
+      Exit code for the script.
+    """
+    verbose = args.multiple_dimension_script_verbose
+    self.parse_bot_configs(args)
+    # Prunes config list to the exact set of configurations to trigger jobs on.
+    # Subclasses can override prune_test_specific_configs() if they want
+    # to prune the list further.
+    self.prune_test_specific_configs(args, verbose)
+
+    # In the remaining arguments, find the Swarming dimensions that are
+    # specified by the bot configs and remove them, because for each shard,
+    # we're going to select one of the bot configs and put all of its Swarming
+    # dimensions on the command line.
+    filtered_remaining_args = copy.deepcopy(remaining)
+    for config in self._bot_configs:
+      for k in config.iterkeys():
+        filtered_remaining_args = self.remove_swarming_dimension(
+          filtered_remaining_args, k)
+
+    merged_json = {}
+
+    # Select the configs to use for this run of the test suite.
+    selected_configs = self.select_config_indices(args, verbose)
+    for i in xrange(args.shards):
+      # For each shard that we're going to distribute, do the following:
+      # 1. Pick which bot configuration to use.
+      # 2. Insert that bot configuration's dimensions as command line
+      #    arguments, and invoke "swarming.py trigger".
+      bot_index = selected_configs[i]
+      # Holds the results of the swarming.py trigger call.
+      try:
+        json_temp = self.make_temp_file(prefix='base_trigger_dimensions',
+                                        suffix='.json')
+        args_to_pass = self.modify_args(filtered_remaining_args, bot_index, i,
+                                        args.shards, json_temp)
+        ret = self.run_swarming(args_to_pass, verbose)
+        if ret:
+          sys.stderr.write('Failed to trigger a task, aborting\n')
+          return ret
+        result_json = self.read_json_from_temp_file(json_temp)
+        if i == 0:
+          # Copy the entire JSON -- in particular, the "request"
+          # dictionary -- from shard 0. "swarming.py collect" uses
+          # some keys from this dictionary, in particular related to
+          # expiration. It also contains useful debugging information.
+          merged_json = copy.deepcopy(result_json)
+          # However, reset the "tasks" entry to an empty dictionary,
+          # which will be handled specially.
+          merged_json['tasks'] = {}
+        for k, v in result_json['tasks'].items():
+          v['shard_index'] = i
+          merged_json['tasks'][k + ':%d:%d' % (i, args.shards)] = v
+      finally:
+        self.delete_temp_file(json_temp)
+    self.write_json_to_file(merged_json, args.dump_json)
+    return 0
+
+
+  def setup_parser_contract(self, parser):
+    parser.add_argument('--multiple-trigger-configs', type=str, required=True,
+                        help='The Swarming configurations to trigger tasks on, '
+                        'in the form of a JSON array of dictionaries (these are'
+                        ' Swarming dimension_sets). At least one entry in this '
+                        'list is required.')
+    parser.add_argument('--multiple-dimension-script-verbose', type=bool,
+                        default=False, help='Turn on verbose logging')
+    parser.add_argument('--dump-json', required=True,
+                        help='(Swarming Trigger Script API) Where to dump the'
+                        ' resulting json which indicates which tasks were'
+                        ' triggered for which shards.')
+    parser.add_argument('--shards', type=int, default=1,
+                        help='How many shards to trigger. Duplicated from the'
+                       ' `swarming.py trigger` command.')
+    return parser
+
diff --git a/testing/trigger_scripts/perf_device_trigger.py b/testing/trigger_scripts/perf_device_trigger.py
new file mode 100755
index 0000000..86fcffe
--- /dev/null
+++ b/testing/trigger_scripts/perf_device_trigger.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Custom swarming triggering script.
+
+This script does custom swarming triggering logic, to enable device affinity
+for our bots, while lumping all trigger calls under one logical step.
+
+This script receives multiple machine configurations on the command line in the
+form of quoted strings. These strings are JSON dictionaries that represent
+entries in the "dimensions" array of the "swarming" dictionary in the
+src/testing/buildbot JSON files.
+
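+An illustrative --multiple-trigger-configs value (mirroring the waterfall JSON
+updated in this change; the specific bot ids and pool are just examples) is:
+
+  [{"id": "swarm846-c4", "pool": "Chrome-perf-fyi"},
+   {"id": "swarm847-c4", "pool": "Chrome-perf-fyi"}]
+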
+Scripts inheriting from the base class must have roughly the same command
+line interface as swarming.py trigger. It modifies it in the following ways:
+ * Intercepts the dump-json argument, and creates its own by combining the
+   results from each trigger call.
+ * Scans through the multiple-trigger-configs dictionaries. For any key found,
+   deletes that dimension from the originally triggered task's dimensions. This
+   is what allows the Swarming dimensions to be replaced. Each config must
+   contain the dimension "id" for the perf device affinity use case.
+ * On a per-shard basis, adds the Swarming dimensions chosen from the
+   multiple-trigger-configs list to the dimensions for the shard.
+
+This script is normally called from the swarming recipe module in tools/build.
+
+"""
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+import tempfile
+
+import base_test_triggerer
+
+class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer):
+  def __init__(self):
+    super(PerfDeviceTriggerer, self).__init__()
+
+  def append_additional_args(self, args):
+    if 'id' in args:
+      # Adds the bot id as an argument to the test.
+      return args + ['--bot', args[args.index('id') + 1]]
+    else:
+      raise Exception('Id must be present for perf device triggering')
+
+  def select_config_indices(self, args, verbose):
+    # For perf we want to trigger a job for every valid config since
+    # each config represents exactly one bot in the perf swarming pool.
+    selected_configs = []
+    for i in xrange(args.shards):
+      selected_configs.append(i)
+    return selected_configs
+
+
+def main():
+  triggerer = PerfDeviceTriggerer()
+  # Set up args for the common contract of the base class.
+  parser = triggerer.setup_parser_contract(
+      argparse.ArgumentParser(description=__doc__))
+  args, remaining = parser.parse_known_args()
+  return triggerer.trigger_tasks(args, remaining)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/testing/trigger_scripts/perf_device_trigger_unittest.py b/testing/trigger_scripts/perf_device_trigger_unittest.py
new file mode 100755
index 0000000..f6eb576
--- /dev/null
+++ b/testing/trigger_scripts/perf_device_trigger_unittest.py
@@ -0,0 +1,146 @@
+#!/usr/bin/python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for perf_device_trigger_unittest.py."""
+
+import unittest
+
+import perf_device_trigger
+
+class Args(object):
+  def __init__(self):
+    self.shards = 1
+    self.dump_json = ''
+    self.multiple_trigger_configs = []
+    self.multiple_dimension_script_verbose = False
+
+
+class FakeTriggerer(perf_device_trigger.PerfDeviceTriggerer):
+  def __init__(self, bot_configs):
+    super(FakeTriggerer, self).__init__()
+    self._bot_configs = bot_configs
+    self._bot_statuses = []
+    self._swarming_runs = []
+    self._files = {}
+    self._temp_file_id = 0
+
+  def set_files(self, files):
+    self._files = files
+
+  def make_temp_file(self, prefix=None, suffix=None):
+    result = prefix + str(self._temp_file_id) + suffix
+    self._temp_file_id += 1
+    return result
+
+  def delete_temp_file(self, temp_file):
+    pass
+
+  def read_json_from_temp_file(self, temp_file):
+    return self._files[temp_file]
+
+  def write_json_to_file(self, merged_json, output_file):
+    self._files[output_file] = merged_json
+
+  def parse_bot_configs(self, args):
+    pass
+
+  def run_swarming(self, args, verbose):
+    self._swarming_runs.append(args)
+
+
+PERF_BOT1 = {
+  'pool': 'Chrome-perf-fyi',
+  'id': 'build1'
+}
+
+PERF_BOT2 = {
+  'pool': 'Chrome-perf-fyi',
+  'id': 'build2'
+}
+
+class UnitTest(unittest.TestCase):
+  def basic_setup(self):
+    triggerer = FakeTriggerer(
+      [
+        PERF_BOT1,
+        PERF_BOT2
+      ]
+    )
+    # Note: the contents of these JSON files don't accurately reflect
+    # that produced by "swarming.py trigger". The unit tests only
+    # verify that shard 0's JSON is preserved.
+    triggerer.set_files({
+      'base_trigger_dimensions0.json': {
+        'base_task_name': 'webgl_conformance_tests',
+        'request': {
+          'expiration_secs': 3600,
+          'properties': {
+            'execution_timeout_secs': 3600,
+          },
+        },
+        'tasks': {
+          'webgl_conformance_tests on NVIDIA GPU on Windows': {
+            'task_id': 'f001',
+          },
+        },
+      },
+      'base_trigger_dimensions1.json': {
+        'tasks': {
+          'webgl_conformance_tests on NVIDIA GPU on Windows': {
+            'task_id': 'f002',
+          },
+        },
+      },
+    })
+    args = Args()
+    args.shards = 2
+    args.dump_json = 'output.json'
+    args.multiple_dimension_script_verbose = False
+    triggerer.trigger_tasks(
+      args,
+      [
+        'trigger',
+        '--dimension',
+        'pool',
+        'chrome-perf-fyi',
+        '--dimension',
+        'id',
+        'build1',
+        '--',
+        'benchmark1',
+      ])
+    return triggerer
+
+  def list_contains_sublist(self, main_list, sub_list):
+    return any(sub_list == main_list[offset:offset + len(sub_list)]
+               for offset in xrange(len(main_list) - (len(sub_list) - 1)))
+
+  def test_shard_env_vars_and_bot_id(self):
+    triggerer = self.basic_setup()
+    self.assertTrue(self.list_contains_sublist(
+      triggerer._swarming_runs[0], ['--bot', 'build1']))
+    self.assertTrue(self.list_contains_sublist(
+      triggerer._swarming_runs[1], ['--bot', 'build2']))
+    self.assertTrue(self.list_contains_sublist(
+      triggerer._swarming_runs[0], ['--env', 'GTEST_SHARD_INDEX', '0']))
+    self.assertTrue(self.list_contains_sublist(
+      triggerer._swarming_runs[1], ['--env', 'GTEST_SHARD_INDEX', '1']))
+    self.assertTrue(self.list_contains_sublist(
+      triggerer._swarming_runs[0], ['--env', 'GTEST_TOTAL_SHARDS', '2']))
+    self.assertTrue(self.list_contains_sublist(
+      triggerer._swarming_runs[1], ['--env', 'GTEST_TOTAL_SHARDS', '2']))
+
+  def test_json_merging(self):
+    triggerer = self.basic_setup()
+    self.assertTrue('output.json' in triggerer._files)
+    output_json = triggerer._files['output.json']
+    self.assertTrue('base_task_name' in output_json)
+    self.assertTrue('request' in output_json)
+    self.assertEqual(output_json['request']['expiration_secs'], 3600)
+    self.assertEqual(
+      output_json['request']['properties']['execution_timeout_secs'], 3600)
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/testing/trigger_scripts/trigger_multiple_dimensions.py b/testing/trigger_scripts/trigger_multiple_dimensions.py
index cb8723cf..c17984a3 100755
--- a/testing/trigger_scripts/trigger_multiple_dimensions.py
+++ b/testing/trigger_scripts/trigger_multiple_dimensions.py
@@ -64,135 +64,12 @@
 import tempfile
 import urllib
 
-
-SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(
-  __file__))))
-
-SWARMING_PY = os.path.join(SRC_DIR, 'tools', 'swarming_client', 'swarming.py')
+import base_test_triggerer
 
 
-def strip_unicode(obj):
-  """Recursively re-encodes strings as utf-8 inside |obj|. Returns the result.
-  """
-  if isinstance(obj, unicode):
-    return obj.encode('utf-8', 'replace')
-
-  if isinstance(obj, list):
-    return list(map(strip_unicode, obj))
-
-  if isinstance(obj, dict):
-    new_obj = type(obj)(
-        (strip_unicode(k), strip_unicode(v)) for k, v in obj.iteritems() )
-    return new_obj
-
-  return obj
-
-
-class MultiDimensionTestTriggerer(object):
+class MultiDimensionTestTriggerer(base_test_triggerer.BaseTestTriggerer):
   def __init__(self):
-    self._bot_configs = None
-    self._bot_statuses = []
-    self._total_bots = 0
-
-  def modify_args(self, all_args, bot_index, shard_index, total_shards,
-                  temp_file):
-    """Modifies the given argument list.
-
-    Specifically, it does the following:
-      * Adds a --dump_json argument, to read in the results of the
-        individual trigger command.
-      * Adds the dimensions associated with the bot config at the given index.
-      * If the number of shards is greater than one, adds --env
-        arguments to set the GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS
-        environment variables to _shard_index_ and _total_shards_,
-        respectively.
-
-    The arguments are structured like this:
-    <args to swarming.py trigger> -- <args to bot running isolate>
-    This means we have to add arguments to specific locations in the argument
-    list, to either affect the trigger command, or what the bot runs.
-
-    """
-    assert '--' in all_args, (
-        'Malformed trigger command; -- argument expected but not found')
-    dash_ind = all_args.index('--')
-    bot_args = ['--dump-json', temp_file]
-    if total_shards > 1:
-      bot_args.append('--env')
-      bot_args.append('GTEST_SHARD_INDEX')
-      bot_args.append(str(shard_index))
-      bot_args.append('--env')
-      bot_args.append('GTEST_TOTAL_SHARDS')
-      bot_args.append(str(total_shards))
-    for key, val in sorted(self._bot_configs[bot_index].iteritems()):
-      bot_args.append('--dimension')
-      bot_args.append(key)
-      bot_args.append(val)
-    return all_args[:dash_ind] + bot_args + all_args[dash_ind:]
-
-  def parse_bot_configs(self, args):
-    try:
-      self._bot_configs = strip_unicode(json.loads(
-        args.multiple_trigger_configs))
-    except Exception as e:
-      raise ValueError('Error while parsing JSON from bot config string %s: %s'
-                       % (args.multiple_trigger_configs, str(e)))
-    # Validate the input.
-    if not isinstance(self._bot_configs, list):
-      raise ValueError('Bot configurations must be a list, were: %s' %
-                       args.multiple_trigger_configs)
-    if len(self._bot_configs) < 1:
-      raise ValueError('Bot configuration list must have at least one entry')
-    if not all(isinstance(entry, dict) for entry in self._bot_configs):
-      raise ValueError('Bot configurations must all be dictionaries')
-
-  def query_swarming_for_bot_configs(self, verbose):
-    # Query Swarming to figure out which bots are available.
-    for config in self._bot_configs:
-      values = []
-      for key, value in sorted(config.iteritems()):
-        values.append(('dimensions', '%s:%s' % (key, value)))
-      # Ignore dead and quarantined bots.
-      values.append(('is_dead', 'FALSE'))
-      values.append(('quarantined', 'FALSE'))
-      query_arg = urllib.urlencode(values)
-
-      temp_file = self.make_temp_file(prefix='trigger_multiple_dimensions',
-                                      suffix='.json')
-      try:
-        ret = self.run_swarming(['query',
-                                 '-S',
-                                 'chromium-swarm.appspot.com',
-                                 '--limit',
-                                 '0',
-                                 '--json',
-                                 temp_file,
-                                 ('bots/count?%s' % query_arg)],
-                                verbose)
-        if ret:
-          raise Exception('Error running swarming.py')
-        with open(temp_file) as fp:
-          query_result = strip_unicode(json.load(fp))
-        # Summarize number of available bots per configuration.
-        count = int(query_result['count'])
-        # Be robust against errors in computation.
-        available = max(0, count - int(query_result['busy']))
-        self._bot_statuses.append({'total': count, 'available': available})
-        if verbose:
-          idx = len(self._bot_statuses) - 1
-          print 'Bot config %d: %s' % (idx, str(self._bot_statuses[idx]))
-      finally:
-        self.delete_temp_file(temp_file)
-    # Sum up the total count of all bots.
-    self._total_bots = sum(x['total'] for x in self._bot_statuses)
-    if verbose:
-      print 'Total bots: %d' % (self._total_bots)
-
-  def remove_swarming_dimension(self, args, dimension):
-    for i in xrange(len(args)):
-      if args[i] == '--dimension' and args[i+1] == dimension:
-        return args[:i] + args[i+3:]
-    return args
+    super(MultiDimensionTestTriggerer, self).__init__()
 
   def choose_random_int(self, max_num):
     return random.randint(1, max_num)
@@ -234,112 +111,22 @@
       r -= status['total']
     raise Exception('Should not reach here')
 
-  def make_temp_file(self, prefix=None, suffix=None):
-    # This trick of closing the file handle is needed on Windows in order to
-    # make the file writeable.
-    h, temp_file = tempfile.mkstemp(prefix=prefix, suffix=suffix)
-    os.close(h)
-    return temp_file
+  def select_config_indices(self, args, verbose):
+    selected_indices = []
+    for _ in xrange(args.shards):
+      selected_indices.append(self.pick_bot_configuration(verbose))
+    return selected_indices
 
-  def delete_temp_file(self, temp_file):
-    os.remove(temp_file)
-
-  def read_json_from_temp_file(self, temp_file):
-    with open(temp_file) as f:
-      return json.load(f)
-
-  def write_json_to_file(self, merged_json, output_file):
-    with open(output_file, 'w') as f:
-      json.dump(merged_json, f)
-
-  def run_swarming(self, args, verbose):
-    if verbose:
-      print 'Running Swarming with args:'
-      print str(args)
-    return subprocess.call([sys.executable, SWARMING_PY] + args)
-
-  def trigger_tasks(self, args, remaining):
-    """Triggers tasks for each bot.
-
-    Args:
-      args: Parsed arguments which we need to use.
-      remaining: The remainder of the arguments, which should be passed to
-                 swarming.py calls.
-
-    Returns:
-      Exit code for the script.
-    """
-    verbose = args.multiple_dimension_script_verbose
-    self.parse_bot_configs(args)
+  def prune_test_specific_configs(self, args, verbose):
     self.query_swarming_for_bot_configs(verbose)
 
-    # In the remaining arguments, find the Swarming dimensions that are
-    # specified by the bot configs and remove them, because for each shard,
-    # we're going to select one of the bot configs and put all of its Swarming
-    # dimensions on the command line.
-    filtered_remaining_args = copy.deepcopy(remaining)
-    for config in self._bot_configs:
-      for k in config.iterkeys():
-        filtered_remaining_args = self.remove_swarming_dimension(
-          filtered_remaining_args, k)
-
-    merged_json = {}
-
-    for i in xrange(args.shards):
-      # For each shard that we're going to distribute, do the following:
-      # 1. Pick which bot configuration to use.
-      # 2. Insert that bot configuration's dimensions as command line
-      #    arguments, and invoke "swarming.py trigger".
-      bot_index = self.pick_bot_configuration(verbose)
-      # Holds the results of the swarming.py trigger call.
-      try:
-        json_temp = self.make_temp_file(prefix='trigger_multiple_dimensions',
-                                        suffix='.json')
-        args_to_pass = self.modify_args(filtered_remaining_args, bot_index, i,
-                                        args.shards, json_temp)
-        ret = self.run_swarming(args_to_pass, verbose)
-        if ret:
-          sys.stderr.write('Failed to trigger a task, aborting\n')
-          return ret
-        result_json = self.read_json_from_temp_file(json_temp)
-        if i == 0:
-          # Copy the entire JSON -- in particular, the "request"
-          # dictionary -- from shard 0. "swarming.py collect" uses
-          # some keys from this dictionary, in particular related to
-          # expiration. It also contains useful debugging information.
-          merged_json = copy.deepcopy(result_json)
-          # However, reset the "tasks" entry to an empty dictionary,
-          # which will be handled specially.
-          merged_json['tasks'] = {}
-        for k, v in result_json['tasks'].items():
-          v['shard_index'] = i
-          merged_json['tasks'][k + ':%d:%d' % (i, args.shards)] = v
-      finally:
-        self.delete_temp_file(json_temp)
-    self.write_json_to_file(merged_json, args.dump_json)
-    return 0
-
-
 def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('--multiple-trigger-configs', type=str, required=True,
-                      help='The Swarming configurations to trigger tasks on, '
-                      'in the form of a JSON array of dictionaries (these are '
-                      'Swarming dimension_sets). At least one entry in this '
-                      'dictionary is required.')
-  parser.add_argument('--multiple-dimension-script-verbose', type=bool,
-                      default=False, help='Turn on verbose logging')
-  parser.add_argument('--dump-json', required=True,
-                      help='(Swarming Trigger Script API) Where to dump the'
-                      ' resulting json which indicates which tasks were'
-                      ' triggered for which shards.')
-  parser.add_argument('--shards', type=int, default=1,
-                      help='How many shards to trigger. Duplicated from the'
-                      ' `swarming.py trigger` command.')
-
+  triggerer = MultiDimensionTestTriggerer()
+  # Set up args for the common contract of the base class.
+  parser = triggerer.setup_parser_contract(
+      argparse.ArgumentParser(description=__doc__))
   args, remaining = parser.parse_known_args()
-
-  return MultiDimensionTestTriggerer().trigger_tasks(args, remaining)
+  return triggerer.trigger_tasks(args, remaining)
 
 
 if __name__ == '__main__':
diff --git a/testing/trigger_scripts/trigger_multiple_dimensions_unittest.py b/testing/trigger_scripts/trigger_multiple_dimensions_unittest.py
index a0a3769..1cd2a27 100755
--- a/testing/trigger_scripts/trigger_multiple_dimensions_unittest.py
+++ b/testing/trigger_scripts/trigger_multiple_dimensions_unittest.py
@@ -99,7 +99,7 @@
     # that produced by "swarming.py trigger". The unit tests only
     # verify that shard 0's JSON is preserved.
     triggerer.set_files({
-      'trigger_multiple_dimensions0.json': {
+      'base_trigger_dimensions0.json': {
         'base_task_name': 'webgl_conformance_tests',
         'request': {
           'expiration_secs': 3600,
@@ -113,7 +113,7 @@
           },
         },
       },
-      'trigger_multiple_dimensions1.json': {
+      'base_trigger_dimensions1.json': {
         'tasks': {
           'webgl_conformance_tests on NVIDIA GPU on Windows': {
             'task_id': 'f002',
@@ -145,7 +145,7 @@
 
   def list_contains_sublist(self, main_list, sub_list):
     return any(sub_list == main_list[offset:offset + len(sub_list)]
-               for offset in xrange(len(main_list) - len(sub_list)))
+               for offset in xrange(len(main_list) - (len(sub_list) - 1)))
 
   def shard_runs_on_os(self, triggerer, shard_index, os):
     return self.list_contains_sublist(triggerer._swarming_runs[shard_index],
diff --git a/tools/perf/chromium.perf.fyi.extras.json b/tools/perf/chromium.perf.fyi.extras.json
index 87e276c..7d5a4e8 100644
--- a/tools/perf/chromium.perf.fyi.extras.json
+++ b/tools/perf/chromium.perf.fyi.extras.json
@@ -361,10 +361,10 @@
         },
         "trigger_script": {
           "args": [
-	    "--bot-id=swarm846-c4",
-	    "--bot-id=swarm847-c4"
-          ],
-          "script": "//tools/perf/perf_device_trigger.py"
+	    "--multiple-trigger-configs",
+	    "[{\"id\": \"swarm846-c4\", \"pool\": \"Chrome-perf-fyi\"}, {\"id\": \"swarm847-c4\", \"pool\": \"Chrome-perf-fyi\"}]"
+	  ],
+          "script": "//testing/trigger_scripts/perf_device_trigger.py"
         },
         "swarming": {
           "can_use_on_swarming_builders": true,
@@ -373,6 +373,7 @@
               "pool": "Chrome-perf-fyi"
             }
           ],
+	  "shards": 2,
           "expiration": 36000,
           "hard_timeout": 10800,
           "ignore_task_failure": false,
diff --git a/tools/perf/perf_device_trigger.py b/tools/perf/perf_device_trigger.py
deleted file mode 100755
index a3755e13..0000000
--- a/tools/perf/perf_device_trigger.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Custom swarming triggering script.
-
-This script does custom swarming triggering logic, to enable device affinity
-for our bots, while lumping all trigger calls under one logical step.
-
-This script must have roughly the same command line interface as swarming.py
-trigger. It modifies it in the following ways:
- * Inserts bot id into swarming trigger dimensions, and swarming task arguments.
- * Intercepts the dump-json argument, and creates its own by combining the
-   results from each trigger call.
-
-This script is normally called from the swarming recipe module in tools/build.
-"""
-
-import argparse
-import json
-import os
-import subprocess
-import sys
-import tempfile
-
-from core import path_util
-
-
-def get_swarming_py_path():
-  return os.path.join(
-      path_util.GetChromiumSrcDir(), 'tools', 'swarming_client', 'swarming.py')
-
-
-def modify_args(all_args, bot_id, temp_file, total_shards, shard_num):
-  """Modifies the given argument list.
-
-  Specifically, it does the following:
-    * Adds a --dump_json argument, to read in the results of the
-      individual trigger command.
-    * Adds a bot id dimension.
-    * Adds the bot id as an argument to the test, so it knows which tests to
-      run.
-
-  The arguments are structured like this:
-  <args to swarming.py trigger> -- <args to bot running isolate>
-  This means we have to add arguments to specific locations in the argument
-  list, to either affect the trigger command, or what the bot runs.
-  """
-  assert '--' in all_args, (
-      'Malformed trigger command; -- argument expected but not found')
-  dash_ind = all_args.index('--')
-
-  # These two flags are required for the swarming task.
-  # In essence we are re-using swarmings existing sharding concept
-  # to be able to collect all n triggered tasks at once.
-  shard_index_flag = 'GTEST_SHARD_INDEX %d' % shard_num
-  total_shards_flag = 'GTEST_SHARD_SHARDS %d' % total_shards
-  # TODO: Do we need to drop the shard flag from swarming?
-  return (all_args[:dash_ind] + [
-      '--dump-json', temp_file, '--dimension', 'id', bot_id]
-      + ['--env', shard_index_flag] + ['--env', total_shards_flag]
-          + all_args[dash_ind:] + ['--bot', bot_id])
-
-
-def trigger_tasks(args, remaining):
-  """Triggers tasks for each bot.
-
-  Args:
-    args: Parsed arguments which we need to use.
-    remaining: The remainder of the arguments, which should be passed to
-               swarming.py calls.
-
-  Returns:
-    Exit code for the script.
-  """
-  merged_json = {'tasks': {}}
-
-  # TODO: Do these in parallel
-  for shard_num, bot_id in enumerate(args.bot_id):
-    print "Triggering shard %d on bot %s " % (shard_num, bot_id)
-    # Holds the results of the swarming.py trigger call.
-    temp_fd, json_temp = tempfile.mkstemp(prefix='perf_device_trigger')
-    try:
-      args_to_pass = modify_args(
-          remaining[:], bot_id, json_temp, len(args.bot_id), shard_num)
-
-      ret = subprocess.call([get_swarming_py_path()] + args_to_pass)
-      if ret:
-        sys.stderr.write('Failed to trigger a task, aborting\n')
-        return ret
-      with open(json_temp) as f:
-        result_json = json.load(f)
-
-      for k, v in result_json['tasks'].items():
-        v['shard_index'] = shard_num
-        merged_json['tasks'][k + ':%d:%d' % (shard_num, len(args.bot_id))] = v
-    finally:
-      os.close(temp_fd)
-      os.remove(json_temp)
-  with open(args.dump_json, 'w') as f:
-    json.dump(merged_json, f)
-  return 0
-
-
-def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('--bot-id', action='append', required=True,
-                      help='Which bot IDs to trigger tasks on. Number of bot'
-                      ' IDs must match the number of shards. This is because'
-                      ' the recipe code uses the --shard argument to determine'
-                      ' how many shards it expects in the output json.')
-  parser.add_argument('--dump-json', required=True,
-                      help='(Swarming Trigger Script API) Where to dump the'
-                      ' resulting json which indicates which tasks were'
-                      ' triggered for which shards.')
-  args, remaining = parser.parse_known_args()
-
-  return trigger_tasks(args, remaining)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/perf/perf_device_trigger_unittest.py b/tools/perf/perf_device_trigger_unittest.py
deleted file mode 100644
index bbfa97f..0000000
--- a/tools/perf/perf_device_trigger_unittest.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import os
-import tempfile
-import unittest
-
-import mock
-
-import perf_device_trigger
-
-
-class FakeArgs(object):
-  def __init__(self, bot_id, dump_json):
-    self.bot_id = bot_id
-    self.dump_json = dump_json
-
-class PerfDeviceTriggerUnittest(unittest.TestCase):
-  def testBasic(self):
-    with mock.patch('perf_device_trigger.get_swarming_py_path') as py_path_mock:
-      py_path_mock.return_value = '/path/to/swarming.py'
-      with mock.patch('perf_device_trigger.subprocess.call') as call_mock:
-        call_mock.return_value = 0
-        m = mock.mock_open(read_data=json.dumps({
-            'tasks': {},
-        }))
-        with mock.patch('perf_device_trigger.open', m, create=True):
-          temp_fd, json_temp = tempfile.mkstemp(
-              prefix='perf_device_trigger_unittest')
-          try:
-            perf_device_trigger.trigger_tasks(
-                FakeArgs(['build1'], json_temp),
-                ['trigger', '--some', '--test', '--', 'args'])
-
-            call_mock.assert_called_once()
-
-            called_args, keyword = call_mock.call_args
-            self.assertEqual(keyword, {})
-            python_args = called_args[0]
-            json_ind = python_args.index('--dump-json')
-            # Remove --dump_json and its arg
-            python_args.pop(json_ind)
-            temp_json_path = python_args.pop(json_ind)
-            # We can't assert the exact name, since it's a temp file name. So
-            # just make sure it has the proper prefix. In order to do this in a
-            # platform independent way, just make sure that the path provided
-            # shares a common prefix with the path we made above (that's not
-            # just '/').
-            self.assertTrue(
-                len(os.path.commonprefix([temp_json_path, json_temp])) > 1)
-            self.assertEqual(
-              python_args, [
-              '/path/to/swarming.py', 'trigger',
-              '--some', '--test',
-              '--dimension', 'id', 'build1',
-              '--env', 'GTEST_SHARD_INDEX 0',
-              '--env', 'GTEST_SHARD_SHARDS 1',
-              '--',
-              'args', '--bot', 'build1'])
-          finally:
-            os.close(temp_fd)
-            os.remove(json_temp)