blob: d36686485f550cc8a3c18606a41e193c7978b7ef [file] [log] [blame]
[email protected]5a306a22014-02-24 22:13:591#!/usr/bin/env python
2# Copyright 2014 The Chromium Authors. All rights reserved.
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6"""A git command for managing a local cache of git repositories."""
7
[email protected]848fd492014-04-09 19:06:448from __future__ import print_function
[email protected]5a306a22014-02-24 22:13:599import errno
10import logging
11import optparse
12import os
[email protected]174766f2014-05-13 21:27:4613import re
[email protected]5a306a22014-02-24 22:13:5914import tempfile
[email protected]f3726102014-04-17 17:24:1515import time
[email protected]aa1e1a42014-06-26 21:58:5116import shutil
[email protected]5a306a22014-02-24 22:13:5917import subprocess
18import sys
19import urlparse
[email protected]776a2c32014-04-25 07:54:2520import zipfile
[email protected]5a306a22014-02-24 22:13:5921
[email protected]563559c2014-04-02 00:36:2422from download_from_google_storage import Gsutil
[email protected]5a306a22014-02-24 22:13:5923import gclient_utils
24import subcommand
25
# Analogous to gc.autopacklimit git config: if a mirror accumulates more
# pack files than this, it is re-bootstrapped rather than gc'd.
GC_AUTOPACKLIMIT = 50

# Message printed when a fetch of the mandatory refspec fails and the
# cache is forcibly re-bootstrapped.
GIT_CACHE_CORRUPT_MESSAGE = 'WARNING: The Git cache is corrupt.'

try:
  # pylint: disable=E0602
  # WindowsError only exists on Windows; alias it so cross-platform
  # 'except WinErr' clauses are valid everywhere.
  WinErr = WindowsError
except NameError:
  class WinErr(Exception):
    pass
[email protected]5a306a22014-02-24 22:13:5937
class LockError(Exception):
  """Raised when a Lockfile cannot be acquired or removed."""
  pass
40
class RefsHeadsFailedToFetch(Exception):
  """Raised when fetching the mandatory +refs/heads/* refspec fails."""
  pass
[email protected]5a306a22014-02-24 22:13:5943
class Lockfile(object):
  """Class to represent a cross-platform process-specific lockfile."""

  def __init__(self, path):
    # The lock protects |path|; the lockfile itself lives alongside it.
    self.path = os.path.abspath(path)
    self.lockfile = self.path + ".lock"
    self.pid = os.getpid()

  def _read_pid(self):
    """Read the pid stored in the lockfile.

    Note: This method is potentially racy. By the time it returns the lockfile
    may have been unlocked, removed, or stolen by some other process.
    """
    try:
      with open(self.lockfile, 'r') as f:
        pid = int(f.readline().strip())
    except (IOError, ValueError):
      pid = None
    return pid

  def _make_lockfile(self):
    """Safely creates a lockfile containing the current pid."""
    # O_EXCL makes creation atomic: it fails with EEXIST if the file exists.
    open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
    fd = os.open(self.lockfile, open_flags, 0o644)
    # Use a context manager so the descriptor is closed even if the write
    # fails (the original leaked the fd on error).
    with os.fdopen(fd, 'w') as f:
      print(self.pid, file=f)

  def _remove_lockfile(self):
    """Delete the lockfile. Complains (implicitly) if it doesn't exist.

    See gclient_utils.py:rmtree docstring for more explanation on the
    windows case.
    """
    if sys.platform == 'win32':
      lockfile = os.path.normcase(self.lockfile)
      # Retry a few times: on Windows the file can be transiently held
      # open by another process (e.g. a virus scanner).
      for _ in xrange(3):
        exitcode = subprocess.call(['cmd.exe', '/c',
                                    'del', '/f', '/q', lockfile])
        if exitcode == 0:
          return
        time.sleep(3)
      raise LockError('Failed to remove lock: %s' % lockfile)
    else:
      os.remove(self.lockfile)

  def lock(self):
    """Acquire the lock.

    Note: This is a NON-BLOCKING FAIL-FAST operation.
    Do. Or do not. There is no try.

    Raises:
      LockError: if the lock is already held, or cannot be created.
    """
    try:
      self._make_lockfile()
    except OSError as e:
      if e.errno == errno.EEXIST:
        raise LockError("%s is already locked" % self.path)
      else:
        raise LockError("Failed to create %s (err %s)" % (self.path, e.errno))

  def unlock(self):
    """Release the lock.

    Raises:
      LockError: if not locked, or locked by a different process.
    """
    try:
      if not self.is_locked():
        raise LockError("%s is not locked" % self.path)
      if not self.i_am_locking():
        raise LockError("%s is locked, but not by me" % self.path)
      self._remove_lockfile()
    except WinErr:
      # Windows is unreliable when it comes to file locking. YMMV.
      pass

  def break_lock(self):
    """Remove the lock, even if it was created by someone else.

    Returns:
      True if a lockfile was removed, False if there was none.
    """
    try:
      self._remove_lockfile()
      return True
    except OSError as exc:
      if exc.errno == errno.ENOENT:
        return False
      else:
        raise

  def is_locked(self):
    """Test if the file is locked by anyone.

    Note: This method is potentially racy. By the time it returns the lockfile
    may have been unlocked, removed, or stolen by some other process.
    """
    return os.path.exists(self.lockfile)

  def i_am_locking(self):
    """Test if the file is locked by this process."""
    return self.is_locked() and self.pid == self._read_pid()
139
[email protected]5a306a22014-02-24 22:13:59140
class Mirror(object):
  """A single mirrored git repository inside the shared local cache.

  Knows how to create, update, lock and (optionally) bootstrap from
  Google Storage the bare repository that backs a given remote URL.
  """

  # On Windows depot_tools ships git as a .bat wrapper.
  git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
  gsutil_exe = os.path.join(
      os.path.dirname(os.path.abspath(__file__)),
      'third_party', 'gsutil', 'gsutil')

  def __init__(self, url, refs=None, print_func=None):
    self.url = url
    self.refs = refs or []
    self.basedir = self.UrlToCacheDir(url)
    self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
    # Allow callers to silence or redirect output.
    self.print = print_func or print

  @property
  def bootstrap_bucket(self):
    # Internal repos bootstrap from the internal bucket; everything else
    # uses the public chromium bucket.
    if 'chrome-internal' in self.url:
      return 'chrome-git-cache'
    else:
      return 'chromium-git-cache'

  @classmethod
  def FromPath(cls, path):
    """Construct a Mirror from an existing cache directory path."""
    return cls(cls.CacheDirToUrl(path))

  @staticmethod
  def UrlToCacheDir(url):
    """Convert a git url to a normalized form for the cache dir path."""
    parsed = urlparse.urlparse(url)
    norm_url = parsed.netloc + parsed.path
    if norm_url.endswith('.git'):
      norm_url = norm_url[:-len('.git')]
    # '-' is escaped to '--' first so that '/' can then be encoded as a
    # single '-' without ambiguity (CacheDirToUrl reverses this).
    return norm_url.replace('-', '--').replace('/', '-').lower()

  @staticmethod
  def CacheDirToUrl(path):
    """Convert a cache dir path to its corresponding url."""
    netpath = re.sub(r'\b-\b', '/', os.path.basename(path)).replace('--', '-')
    # NOTE(review): assumes the original URL scheme was https -- the
    # scheme is not recoverable from the directory name.
    return 'https://%s' % netpath

  @staticmethod
  def FindExecutable(executable):
    """This mimics the "which" utility: return a path or None."""
    path_folders = os.environ.get('PATH').split(os.pathsep)

    for path_folder in path_folders:
      target = os.path.join(path_folder, executable)
      # Just in case we have some ~/blah paths.
      target = os.path.abspath(os.path.expanduser(target))
      if os.path.isfile(target) and os.access(target, os.X_OK):
        return target
      if sys.platform.startswith('win'):
        # On Windows also try the common executable extensions.
        for suffix in ('.bat', '.cmd', '.exe'):
          alt_target = target + suffix
          if os.path.isfile(alt_target) and os.access(alt_target, os.X_OK):
            return alt_target
    return None

  @classmethod
  def SetCachePath(cls, cachepath):
    setattr(cls, 'cachepath', cachepath)

  @classmethod
  def GetCachePath(cls):
    """Return the cache root, reading git's global cache.cachepath once.

    Raises:
      RuntimeError: if no cache.cachepath is configured.
    """
    if not hasattr(cls, 'cachepath'):
      try:
        cachepath = subprocess.check_output(
            [cls.git_exe, 'config', '--global', 'cache.cachepath']).strip()
      except subprocess.CalledProcessError:
        cachepath = None
      if not cachepath:
        raise RuntimeError('No global cache.cachepath git configuration found.')
      setattr(cls, 'cachepath', cachepath)
    return getattr(cls, 'cachepath')

  def RunGit(self, cmd, **kwargs):
    """Run git in a subprocess."""
    cwd = kwargs.setdefault('cwd', self.mirror_path)
    kwargs.setdefault('print_stdout', False)
    kwargs.setdefault('filter_fn', self.print)
    env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
    # Make git fail instead of prompting interactively for credentials.
    env.setdefault('GIT_ASKPASS', 'true')
    env.setdefault('SSH_ASKPASS', 'true')
    self.print('running "git %s" in "%s"' % (' '.join(cmd), cwd))
    gclient_utils.CheckCallAndFilter([self.git_exe] + cmd, **kwargs)

  def config(self, cwd=None):
    """Set the git config options the cache relies on in |cwd|."""
    if cwd is None:
      cwd = self.mirror_path

    # Don't run git-gc in a daemon.  Bad things can happen if it gets killed.
    self.RunGit(['config', 'gc.autodetach', '0'], cwd=cwd)

    # Don't combine pack files into one big pack file.  It's really slow for
    # repositories, and there's no way to track progress and make sure it's
    # not stuck.
    self.RunGit(['config', 'gc.autopacklimit', '0'], cwd=cwd)

    # Allocate more RAM for cache-ing delta chains, for better performance
    # of "Resolving deltas".
    self.RunGit(['config', 'core.deltaBaseCacheLimit',
                 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd)

    self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd)
    # --replace-all with a value-regex keeps one canonical entry per refspec.
    self.RunGit(['config', '--replace-all', 'remote.origin.fetch',
                 '+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'], cwd=cwd)
    for ref in self.refs:
      ref = ref.lstrip('+').rstrip('/')
      if ref.startswith('refs/'):
        refspec = '+%s:%s' % (ref, ref)
        regex = r'\+%s:.*' % ref.replace('*', r'\*')
      else:
        refspec = '+refs/%s/*:refs/%s/*' % (ref, ref)
        regex = r'\+refs/heads/%s:.*' % ref.replace('*', r'\*')
      self.RunGit(
          ['config', '--replace-all', 'remote.origin.fetch', refspec, regex],
          cwd=cwd)

  def bootstrap_repo(self, directory):
    """Bootstrap the repo from Google Storage if possible.

    More apt-ly named bootstrap_repo_from_cloud_if_possible_else_do_nothing().

    Returns:
      True if |directory| was populated from a bootstrap zip, else False.
    """

    # Prefer native unzip tools; fall back to the (slower) zipfile module
    # when they are unavailable or broken.
    python_fallback = False
    if sys.platform.startswith('win') and not self.FindExecutable('7z'):
      python_fallback = True
    elif sys.platform.startswith('darwin'):
      # The OSX version of unzip doesn't support zip64.
      python_fallback = True
    elif not self.FindExecutable('unzip'):
      python_fallback = True

    gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
    gsutil = Gsutil(self.gsutil_exe, boto_path=None, bypass_prodaccess=True)
    # Get the most recent version of the zipfile.
    _, ls_out, _ = gsutil.check_call('ls', gs_folder)
    ls_out_sorted = sorted(ls_out.splitlines())
    if not ls_out_sorted:
      # This repo is not on Google Storage.
      return False
    latest_checkout = ls_out_sorted[-1]

    # Download zip file to a temporary directory.
    try:
      tempdir = tempfile.mkdtemp(prefix='_cache_tmp', dir=self.GetCachePath())
      self.print('Downloading %s' % latest_checkout)
      code = gsutil.call('cp', latest_checkout, tempdir)
      if code:
        return False
      filename = os.path.join(tempdir, latest_checkout.split('/')[-1])

      # Unpack the file with 7z on Windows, unzip on linux, or fallback.
      if not python_fallback:
        if sys.platform.startswith('win'):
          cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename]
        else:
          cmd = ['unzip', filename, '-d', directory]
        retcode = subprocess.call(cmd)
      else:
        try:
          with zipfile.ZipFile(filename, 'r') as f:
            f.printdir()
            f.extractall(directory)
        except Exception as e:
          # NOTE(review): self.print may be a caller-supplied function that
          # does not accept the |file| keyword -- confirm against callers
          # that pass print_func.
          self.print('Encountered error: %s' % str(e), file=sys.stderr)
          retcode = 1
        else:
          retcode = 0
    finally:
      # Clean up the downloaded zipfile.
      gclient_utils.rmtree(tempdir)

    if retcode:
      self.print(
          'Extracting bootstrap zipfile %s failed.\n'
          'Resuming normal operations.' % filename)
      return False
    return True

  def exists(self):
    """True if the mirror already has a git config file on disk."""
    return os.path.isfile(os.path.join(self.mirror_path, 'config'))

  def _ensure_bootstrapped(self, depth, bootstrap, force=False):
    """(Re-)bootstrap the mirror if missing, forced, or over-packed.

    Returns:
      A temporary directory holding the fresh repo (the caller renames it
      into place in populate()'s finally block), or None when the existing
      mirror_path should be used as-is.
    """
    tempdir = None
    config_file = os.path.join(self.mirror_path, 'config')
    pack_dir = os.path.join(self.mirror_path, 'objects', 'pack')
    pack_files = []

    if os.path.isdir(pack_dir):
      pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')]

    # Too many pack files means gc never ran (gc.autopacklimit is 0 here),
    # so grabbing a fresh bootstrap is cheaper than fetching into the
    # fragmented repo.
    should_bootstrap = (force or
                        not os.path.exists(config_file) or
                        len(pack_files) > GC_AUTOPACKLIMIT)
    if should_bootstrap:
      tempdir = tempfile.mkdtemp(
          prefix='_cache_tmp', suffix=self.basedir, dir=self.GetCachePath())
      bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir)
      if bootstrapped:
        # Bootstrap succeeded; delete previous cache, if any.
        try:
          # Try to move folder to tempdir if possible.
          defunct_dir = tempfile.mkdtemp()
          shutil.move(self.mirror_path, defunct_dir)
          self.print('Moved defunct directory for repository %s from %s to %s'
                     % (self.url, self.mirror_path, defunct_dir))
        except Exception:
          gclient_utils.rmtree(self.mirror_path)
      elif not os.path.exists(config_file):
        # Bootstrap failed, no previous cache; start with a bare git dir.
        self.RunGit(['init', '--bare'], cwd=tempdir)
      else:
        # Bootstrap failed, previous cache exists; warn and continue.
        logging.warn(
            'Git cache has a lot of pack files (%d). Tried to re-bootstrap '
            'but failed. Continuing with non-optimized repository.'
            % len(pack_files))
        gclient_utils.rmtree(tempdir)
        tempdir = None
    else:
      if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
        logging.warn(
            'Shallow fetch requested, but repo cache already exists.')
    return tempdir

  def _fetch(self, rundir, verbose, depth):
    """Configure |rundir| and fetch every configured refspec into it.

    Raises:
      RefsHeadsFailedToFetch: if the mandatory +refs/heads/* spec fails.
    """
    self.config(rundir)
    v = []
    d = []
    if verbose:
      v = ['-v', '--progress']
    if depth:
      d = ['--depth', str(depth)]
    fetch_cmd = ['fetch'] + v + d + ['origin']
    fetch_specs = subprocess.check_output(
        [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
        cwd=rundir).strip().splitlines()
    for spec in fetch_specs:
      try:
        self.print('Fetching %s' % spec)
        self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True)
      except subprocess.CalledProcessError:
        if spec == '+refs/heads/*:refs/heads/*':
          # The main refspec is mandatory; signal populate() to clean up
          # and force a re-bootstrap.
          raise RefsHeadsFailedToFetch
        logging.warn('Fetch of %s failed' % spec)

  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False, ignore_lock=False):
    """Ensure the mirror exists, is bootstrapped, and is up to date."""
    assert self.GetCachePath()
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    lockfile = Lockfile(self.mirror_path)
    if not ignore_lock:
      lockfile.lock()

    tempdir = None
    try:
      tempdir = self._ensure_bootstrapped(depth, bootstrap)
      rundir = tempdir or self.mirror_path
      self._fetch(rundir, verbose, depth)
    except RefsHeadsFailedToFetch:
      # This is a major failure, we need to clean and force a bootstrap.
      # (|rundir| is always bound here: RefsHeadsFailedToFetch can only
      # come from the _fetch call above.)
      gclient_utils.rmtree(rundir)
      self.print(GIT_CACHE_CORRUPT_MESSAGE)
      tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
      assert tempdir
      self._fetch(tempdir or self.mirror_path, verbose, depth)
    finally:
      # A freshly-bootstrapped repo is built in |tempdir| and only renamed
      # into place once fetching succeeded (or we are unwinding).
      if tempdir:
        try:
          os.rename(tempdir, self.mirror_path)
        except OSError as e:
          # This is somehow racy on Windows.
          # Catching OSError because WindowsError isn't portable and
          # pylint complains.
          self.print('Error moving %s to %s: %s' % (tempdir, self.mirror_path,
                                                    str(e)))
      if not ignore_lock:
        lockfile.unlock()

  def update_bootstrap(self, prune=False):
    """Zip this mirror and upload it to Google Storage as a new bootstrap.

    Args:
      prune: if True, delete every other zipball for this repo afterwards.
    """
    # The files are named <git number>.zip
    gen_number = subprocess.check_output(
        [self.git_exe, 'number', 'master'], cwd=self.mirror_path).strip()
    self.RunGit(['gc'])  # Run Garbage Collect to compress packfile.
    # Creating a temp file and then deleting it ensures we can use this name.
    _, tmp_zipfile = tempfile.mkstemp(suffix='.zip')
    os.remove(tmp_zipfile)
    subprocess.call(['zip', '-r', tmp_zipfile, '.'], cwd=self.mirror_path)
    gsutil = Gsutil(path=self.gsutil_exe, boto_path=None)
    gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
    dest_name = '%s/%s.zip' % (gs_folder, gen_number)
    gsutil.call('cp', tmp_zipfile, dest_name)
    os.remove(tmp_zipfile)

    # Remove all other files in the same directory.
    if prune:
      _, ls_out, _ = gsutil.check_call('ls', gs_folder)
      for filename in ls_out.splitlines():
        if filename == dest_name:
          continue
        gsutil.call('rm', filename)

  @staticmethod
  def DeleteTmpPackFiles(path):
    """Delete stale temporary pack files left by interrupted git processes."""
    pack_dir = os.path.join(path, 'objects', 'pack')
    if not os.path.isdir(pack_dir):
      return
    pack_files = [f for f in os.listdir(pack_dir) if
                  f.startswith('.tmp-') or f.startswith('tmp_pack_')]
    for f in pack_files:
      f = os.path.join(pack_dir, f)
      try:
        os.remove(f)
        logging.warn('Deleted stale temporary pack file %s' % f)
      except OSError:
        logging.warn('Unable to delete temporary pack file %s' % f)

  @classmethod
  def BreakLocks(cls, path):
    """Break all locks on the repo at |path|.

    Returns:
      True if any lock was actually removed.
    """
    did_unlock = False
    lf = Lockfile(path)
    if lf.break_lock():
      did_unlock = True
    # Look for lock files that might have been left behind by an interrupted
    # git process.
    lf = os.path.join(path, 'config.lock')
    if os.path.exists(lf):
      os.remove(lf)
      did_unlock = True
    cls.DeleteTmpPackFiles(path)
    return did_unlock

  def unlock(self):
    """Break any locks held on this mirror's directory."""
    return self.BreakLocks(self.mirror_path)

  @classmethod
  def UnlockAll(cls):
    """Break locks on every repo in the cache.

    Returns:
      The list of repo dirs that were unlocked (None if no cache path).
    """
    cachepath = cls.GetCachePath()
    if not cachepath:
      return
    dirlist = os.listdir(cachepath)
    repo_dirs = set([os.path.join(cachepath, path) for path in dirlist
                     if os.path.isdir(os.path.join(cachepath, path))])
    for dirent in dirlist:
      # Leftover bootstrap/scratch dirs are simply deleted; orphan .lock
      # files mark a repo dir that still needs unlocking.
      if dirent.startswith('_cache_tmp') or dirent.startswith('tmp'):
        gclient_utils.rmtree(os.path.join(cachepath, dirent))
      elif (dirent.endswith('.lock') and
            os.path.isfile(os.path.join(cachepath, dirent))):
        repo_dirs.add(os.path.join(cachepath, dirent[:-5]))

    unlocked_repos = []
    for repo_dir in repo_dirs:
      if cls.BreakLocks(repo_dir):
        unlocked_repos.append(repo_dir)

    return unlocked_repos
[email protected]848fd492014-04-09 19:06:44501
@subcommand.usage('[url of repo to check for caching]')
def CMDexists(parser, args):
  """Check to see if there already is a cache of the given repo."""
  _, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('git cache exists only takes exactly one repo url.')
  # Print the cache path (and exit 0) only when the mirror is present.
  mirror = Mirror(args[0])
  if not mirror.exists():
    return 1
  print(mirror.mirror_path)
  return 0
514
515
@subcommand.usage('[url of repo to create a bootstrap zip file]')
def CMDupdate_bootstrap(parser, args):
  """Create and upload a bootstrap tarball."""
  # Lets just assert we can't do this on Windows.
  if sys.platform.startswith('win'):
    print('Sorry, update bootstrap will not work on Windows.', file=sys.stderr)
    return 1

  parser.add_option('--prune', action='store_true',
                    help='Prune all other cached zipballs of the same repo.')

  # First, we need to ensure the cache is populated.
  # (CMDpopulate adds its own options to the shared |parser| and parses
  # |populate_args| itself.)
  populate_args = args[:]
  populate_args.append('--no-bootstrap')
  CMDpopulate(parser, populate_args)

  # Get the repo directory.
  options, args = parser.parse_args(args)
  url = args[0]
  mirror = Mirror(url)
  mirror.update_bootstrap(options.prune)
  return 0
[email protected]563559c2014-04-02 00:36:24538
539
@subcommand.usage('[url of repo to add to or update in cache]')
def CMDpopulate(parser, args):
  """Ensure that the cache has all up-to-date objects for the given repo."""
  parser.add_option('--depth', type='int',
                    help='Only cache DEPTH commits of history')
  parser.add_option('--shallow', '-s', action='store_true',
                    help='Only cache 10000 commits of history')
  parser.add_option('--ref', action='append',
                    help='Specify additional refs to be fetched')
  # Both underscore and dash spellings are accepted for compatibility.
  parser.add_option('--no_bootstrap', '--no-bootstrap',
                    action='store_true',
                    help='Don\'t bootstrap from Google Storage')
  parser.add_option('--ignore_locks', '--ignore-locks',
                    action='store_true',
                    help='Don\'t try to lock repository')

  options, args = parser.parse_args(args)
  if not len(args) == 1:
    parser.error('git cache populate only takes exactly one repo url.')
  url = args[0]

  mirror = Mirror(url, refs=options.ref)
  kwargs = {
    'verbose': options.verbose,
    'shallow': options.shallow,
    'bootstrap': not options.no_bootstrap,
    'ignore_lock': options.ignore_locks,
  }
  # Only forward --depth when given so populate()'s default applies.
  if options.depth:
    kwargs['depth'] = options.depth
  mirror.populate(**kwargs)
[email protected]5a306a22014-02-24 22:13:59571
572
@subcommand.usage('Fetch new commits into cache and current checkout')
def CMDfetch(parser, args):
  """Update mirror, and fetch in cwd."""
  parser.add_option('--all', action='store_true', help='Fetch all remotes')
  options, args = parser.parse_args(args)

  # Figure out which remotes to fetch.  This mimics the behavior of regular
  # 'git fetch'.  Note that in the case of "stacked" or "pipelined" branches,
  # this will NOT try to traverse up the branching structure to find the
  # ultimate remote to update.
  remotes = []
  if options.all:
    assert not args, 'fatal: fetch --all does not take a repository argument'
    remotes = subprocess.check_output([Mirror.git_exe, 'remote']).splitlines()
  elif args:
    remotes = args
  else:
    current_branch = subprocess.check_output(
        [Mirror.git_exe, 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
    if current_branch != 'HEAD':
      upstream = subprocess.check_output(
          [Mirror.git_exe, 'config', 'branch.%s.remote' % current_branch]
          ).strip()
      if upstream and upstream != '.':
        remotes = [upstream]
    if not remotes:
      remotes = ['origin']

  cachepath = Mirror.GetCachePath()
  # Bug fix: strip() the output before abspath() -- check_output keeps the
  # trailing newline, which previously survived into |git_dir| and broke
  # Mirror.FromPath().  Also drop the redundant second abspath() call.
  git_dir = os.path.abspath(subprocess.check_output(
      [Mirror.git_exe, 'rev-parse', '--git-dir']).strip())
  # If we are already inside the cache, just refresh that mirror.
  if git_dir.startswith(cachepath):
    mirror = Mirror.FromPath(git_dir)
    mirror.populate()
    return 0
  for remote in remotes:
    remote_url = subprocess.check_output(
        [Mirror.git_exe, 'config', 'remote.%s.url' % remote]).strip()
    # When the remote points into the cache, update the mirror first so
    # the subsequent fetch sees fresh objects.
    if remote_url.startswith(cachepath):
      mirror = Mirror.FromPath(remote_url)
      mirror.print = lambda *args: None
      print('Updating git cache...')
      mirror.populate()
    subprocess.check_call([Mirror.git_exe, 'fetch', remote])
  return 0
619
620
@subcommand.usage('[url of repo to unlock, or -a|--all]')
def CMDunlock(parser, args):
  """Unlock one or all repos if their lock files are still around."""
  parser.add_option('--force', '-f', action='store_true',
                    help='Actually perform the action')
  parser.add_option('--all', '-a', action='store_true',
                    help='Unlock all repository caches')
  options, args = parser.parse_args(args)
  if len(args) > 1 or (len(args) == 0 and not options.all):
    parser.error('git cache unlock takes exactly one repo url, or --all')

  if not options.force:
    cachepath = Mirror.GetCachePath()
    # Bug fix: test the joined path -- the bare directory-entry name is
    # relative to cwd, so os.path.isfile(path) was almost always False and
    # the list came out empty.
    lockfiles = [os.path.join(cachepath, path)
                 for path in os.listdir(cachepath)
                 if path.endswith('.lock')
                 and os.path.isfile(os.path.join(cachepath, path))]
    # Bug fix: a '+' was missing before the join, so the whole message was
    # being used as the join separator instead of being printed.
    parser.error('git cache unlock requires -f|--force to do anything. '
                 'Refusing to unlock the following repo caches: '
                 + ', '.join(lockfiles))

  unlocked_repos = []
  if options.all:
    unlocked_repos.extend(Mirror.UnlockAll())
  else:
    m = Mirror(args[0])
    if m.unlock():
      unlocked_repos.append(m.mirror_path)

  if unlocked_repos:
    logging.info('Broke locks on these caches:\n  %s' % '\n  '.join(
        unlocked_repos))
[email protected]5a306a22014-02-24 22:13:59652
653
class OptionParser(optparse.OptionParser):
  """Wrapper class for OptionParser to handle global options."""

  def __init__(self, *args, **kwargs):
    optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs)
    self.add_option('-c', '--cache-dir',
                    help='Path to the directory containing the cache')
    self.add_option('-v', '--verbose', action='count', default=1,
                    help='Increase verbosity (can be passed multiple times)')
    self.add_option('-q', '--quiet', action='store_true',
                    help='Suppress all extraneous output')

  def parse_args(self, args=None, values=None):
    # Beyond normal parsing, this configures logging and installs the
    # global cache path for the rest of the run.
    options, args = optparse.OptionParser.parse_args(self, args, values)
    if options.quiet:
      options.verbose = 0

    # Map verbosity count onto logging levels, clamped to DEBUG.
    levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])

    try:
      global_cache_dir = Mirror.GetCachePath()
    except RuntimeError:
      global_cache_dir = None
    if options.cache_dir:
      if global_cache_dir and (
          os.path.abspath(options.cache_dir) !=
          os.path.abspath(global_cache_dir)):
        logging.warn('Overriding globally-configured cache directory.')
      Mirror.SetCachePath(options.cache_dir)

    return options, args
686
687
def main(argv):
  """Dispatch |argv| to the requested `git cache` subcommand."""
  return subcommand.CommandDispatcher(__name__).execute(OptionParser(), argv)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))