[email protected] | 4b135d7d | 2013-07-26 10:29:10 | [diff] [blame] | 1 | #!/usr/bin/env python |
| 2 | |
Avi Drissman | 6459548 | 2022-09-14 20:52:29 | [diff] [blame^] | 3 | # Copyright 2013 The Chromium Authors |
[email protected] | 4b135d7d | 2013-07-26 10:29:10 | [diff] [blame] | 4 | # Use of this source code is governed by a BSD-style license that can be |
| 5 | # found in the LICENSE file. |
| 6 | |
import csv
import datetime
import json
import os
import shlex
import subprocess
import sys
from optparse import OptionParser

"""Start a client to fetch web pages either using wget or using quic_client.

Usage: the invocation
  run_client.py --quic_binary_dir=../../../../out/Debug \
      --address=127.0.0.1 --port=5000 --infile=test_urls.json \
      --delay_file=delay.csv --packets_file=packets.csv
fetches the pages listed in test_urls.json from a quic server running at
127.0.0.1 on port 5000 using the binary ../../../../out/Debug/quic_client,
stores the page load delays in delay.csv, and stores the max received
packet numbers (QUIC only) in packets.csv.
If --use_wget is set, the URLs are fetched with wget instead and the flags
--address, --port, --quic_binary_dir, etc. are ignored.
"""
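# The --infile JSON is expected to look roughly like this (structure inferred
# from ReadPages below): 'resources' is optional, and each resource path is
# resolved relative to the directory of the page's 'url'.
#
#   {
#     "pages": [
#       {"url": "http://www.example.com/index.html",
#        "resources": ["images/a.png", "style.css"]}
#     ]
#   }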

def Timestamp(datetm=None):
  """Get the timestamp in microseconds.

  Args:
    datetm: the date and time to be converted to a timestamp.
      If not set, use the current UTC time.
  Returns:
    The timestamp in microseconds.
  """
  datetm = datetm or datetime.datetime.utcnow()
  diff = datetm - datetime.datetime.utcfromtimestamp(0)
  timestamp = (diff.days * 86400 + diff.seconds) * 1000000 + diff.microseconds
  return timestamp

class PageloadExperiment:
  def __init__(self, use_wget, quic_binary_dir, quic_server_address,
               quic_server_port):
    """Initialize PageloadExperiment.

    Args:
      use_wget: Whether to use wget instead of quic_client.
      quic_binary_dir: Directory containing the quic_client binary.
      quic_server_address: IP address of the quic server.
      quic_server_port: Port of the quic server.
    """
    self.use_wget = use_wget
    self.quic_binary_dir = quic_binary_dir
    self.quic_server_address = quic_server_address
    self.quic_server_port = quic_server_port
    if not use_wget and not os.path.isfile(quic_binary_dir + '/quic_client'):
      raise IOError('There is no quic_client in the given dir: %s.'
                    % quic_binary_dir)

  @classmethod
  def ReadPages(cls, json_file):
    """Return the list of pages, each a list of URLs, from the json_file.

    One entry of the list may contain an HTML link and multiple resources.
    """
    page_list = []
    with open(json_file) as f:
      data = json.load(f)
    for page in data['pages']:
      url = page['url']
      if 'resources' in page:
        resources = page['resources']
      else:
        resources = None
      if not resources:
        page_list.append([url])
      else:
        urls = [url]
        # For url http://x.com/z/y.html, url_dir is http://x.com/z.
        url_dir = url.rsplit('/', 1)[0]
        for resource in resources:
          urls.append(url_dir + '/' + resource)
        page_list.append(urls)
    return page_list

  def DownloadOnePage(self, urls):
    """Download a page emulated by a list of urls.

    Args:
      urls: list of URLs to fetch.
    Returns:
      A tuple of the page download time in microseconds and the max received
      packet number (0 when using wget).
    """
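    # Build the fetch command: 'wget -O -' streams every document to stdout,
    # while quic_client is pointed at the configured server address and port.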
    if self.use_wget:
      cmd = 'wget -O -'
    else:
      cmd = '%s/quic_client --port=%s --address=%s' % (
          self.quic_binary_dir, self.quic_server_port,
          self.quic_server_address)
    cmd_in_list = shlex.split(cmd)
    cmd_in_list.extend(urls)
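    # The delay measured below covers the whole child process run (startup,
    # fetches, and exit), not just network transfer time.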
    start_time = Timestamp()
    ps_proc = subprocess.Popen(cmd_in_list,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    _std_out, std_err = ps_proc.communicate()
    end_time = Timestamp()
    delta_time = end_time - start_time
    max_packets = 0
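    # quic_client (debug build only) logs a 'Client: Got packet' line for
    # each packet received; scan stderr and record the largest packet number
    # reported. wget produces no such log, so the count stays 0.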
    if not self.use_wget:
      for line in std_err.splitlines():
        if line.find('Client: Got packet') >= 0:
          elems = line.split()
          packet_num = int(elems[4])
          max_packets = max(max_packets, packet_num)
    return delta_time, max_packets

  def RunExperiment(self, infile, delay_file, packets_file=None, num_it=1):
    """Run the pageload experiment.

    Args:
      infile: Input json file describing the page list.
      delay_file: Output csv file storing the page load delays in seconds.
      packets_file: Output csv file storing the max received packet numbers.
      num_it: Number of iterations to run in this experiment.
    """
    page_list = self.ReadPages(infile)
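    # The csv header row is the fetch method ('wget' or 'quic') followed by
    # the file name of each page's top-level URL.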
    header = [urls[0].rsplit('/', 1)[1] for urls in page_list]
    header0 = 'wget' if self.use_wget else 'quic'
    header = [header0] + header

    plt_list = []
    packets_list = []
    for i in range(num_it):
      plt_one_row = [str(i)]
      packets_one_row = [str(i)]
      for urls in page_list:
        time_micros, num_packets = self.DownloadOnePage(urls)
        time_secs = time_micros / 1000000.0
        plt_one_row.append('%6.3f' % time_secs)
        packets_one_row.append('%5d' % num_packets)
      plt_list.append(plt_one_row)
      packets_list.append(packets_one_row)

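    # Each data row starts with the iteration index, then one value per page:
    # seconds in delay_file, max packet number in packets_file.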
    with open(delay_file, 'w') as f:
      csv_writer = csv.writer(f, delimiter=',')
      csv_writer.writerow(header)
      for one_row in plt_list:
        csv_writer.writerow(one_row)
    if packets_file:
      with open(packets_file, 'w') as f:
        csv_writer = csv.writer(f, delimiter=',')
        csv_writer.writerow(header)
        for one_row in packets_list:
          csv_writer.writerow(one_row)


def main():
  parser = OptionParser()
  parser.add_option('--use_wget', dest='use_wget', action='store_true',
                    default=False)
  # Note that only the debug build generates the log containing packet
  # information.
  parser.add_option('--quic_binary_dir', dest='quic_binary_dir',
                    default='../../../../out/Debug')
  # Whichever server address you specify, you need to run quic_server on that
  # machine and populate its cache with the URLs requested in --infile.
  parser.add_option('--address', dest='quic_server_address',
                    default='127.0.0.1')
  parser.add_option('--port', dest='quic_server_port',
                    default='5002')
  parser.add_option('--delay_file', dest='delay_file', default='delay.csv')
  parser.add_option('--packets_file', dest='packets_file',
                    default='packets.csv')
  parser.add_option('--infile', dest='infile', default='test_urls.json')
  (options, _) = parser.parse_args()

  exp = PageloadExperiment(options.use_wget, options.quic_binary_dir,
                           options.quic_server_address,
                           options.quic_server_port)
  exp.RunExperiment(options.infile, options.delay_file, options.packets_file)

if __name__ == '__main__':
  sys.exit(main())