source_index.py

#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Usage: Run with the '--help' flag to see how to use this script.

This tool will take a PDB on the command line, extract the source files that
were used in building the PDB, query the source server for which repository
and revision these files are at, and then finally write this information back
into the PDB in a format that the debugging tools understand. This allows for
automatic source debugging, as all of the information is contained in the PDB,
and the debugger can go out and fetch the source files.

You most likely want to run this immediately after a build, since the source
input files need to match the generated PDB, and we want the correct revision
information for the exact files that were used for the build.

Here's a quick overview of what this script does:
- Extract the list of source files listed in the PDB, i.e. all the source
  files that have been used to produce the matching binary. This list contains
  some files for which the source code is accessible (e.g. files from the
  Chromium repo) and some from private repos (e.g. files that have been used
  to build the CRT static library that we link against).
- Iterate over the list of files from the previous step; for each file there
  are a few different possibilities:
  - The file comes from a public Git repository (e.g. Chromium). In this case
    this script will list all the files that are contained in this repository
    and index them all at once (and then remove them from the file list it's
    iterating over).
  - The file is a generated file produced during the build. It will likely
    live in a subdirectory of the build directory; if the "--build-dir" flag
    has been passed to this script, this directory will be automatically
    ignored.
  - The directory containing the file isn't part of a Git checkout. The file
    will be excluded and all the files from this directory will get added to
    an exclusion list. Specifying the toolchain directory (via the
    "--toolchain-dir" flag) allows automatically skipping all the files from
    the VS toolchain directory (e.g. all the CRT files), which is much faster
    than using Git to check if these files are from a Git repo.
  - The file doesn't exist on disk, which means that it comes from a
    third-party static library. The file will be ignored.
- A map that associates each source file with a public URL will be added in a
  new stream in the PDB.

NOTE: Expected to run under a native win32 python, NOT cygwin. All paths are
dealt with as win32 paths, since we have to interact with the Microsoft tools.
"""

from __future__ import print_function

try:
  # Python 3.x
  from urllib.parse import urlparse
  # Replace the Python 2 unicode function with str when running Python 3.
  unicode = str
except ImportError:
  # Python 2.x
  from urlparse import urlparse

import optparse
import os
import re
import subprocess
import sys
import tempfile
import time

import win32api

from collections import namedtuple

# Map that associates Git repository URLs with the URL that should be used to
# retrieve individual files from this repo. Entries in this map should have the
# following format:
#   {
#     'url': |path to public URL, with the {revision} and {file_path} tags|,
#     'base64': |boolean indicating if the files are base64 encoded|
#   }
#
# Here's an example of what the entry for the Chromium repo looks like:
#   {
#     'url': 'chromium.googlesource.com/+/{revision}/{file_path}?format=TEXT',
#     'base64': True
#   }
#
# The {revision} and {file_path} tags will be replaced by the appropriate
# values when building the source indexing map that gets added to the PDB.
#
# TODO(sebmarchand): Check if this is really needed, this is a legacy thing
# coming from when this script was used for SVN repos and we could probably do
# without it.
REPO_MAP = {}

# Regex matching a junction as it's printed by the 'dir' command.
# It usually looks like this when the junction has been created with mklink:
#
#   Directory of C:\a
#
#   07/23/2015  06:42 PM    <JUNCTION>     b  [C:\real_a\b]
#
# The junctions created with the 'junction' utility look almost the same,
# except for a leading '\??\' on the junction target:
#
#   07/23/2015  06:42 PM    <JUNCTION>     b  [\??\C:\real_a\b]
_DIR_JUNCTION_RE = re.compile(r"""
  .*<JUNCTION\>\s+(?P<dirname>[^ ]+)\s+\[(\\\?\?\\)?(?P<real_path>.*)\]
""", re.VERBOSE)

# A named tuple used to store the information about a repository.
#
# It contains the following members:
#   - repo: The URL of the repository.
#   - rev: The revision (or hash) of the current checkout.
#   - files: The list of files coming from this repository.
#   - root_path: The root path of this checkout.
#   - path_prefix: A prefix to apply to the filename of the files coming from
#     this repository.
RevisionInfo = namedtuple('RevisionInfo',
                          ['repo', 'rev', 'files', 'root_path', 'path_prefix'])


def GetCasedFilePath(filename):
  """Return the correctly cased path for a given filename."""
  return win32api.GetLongPathName(win32api.GetShortPathName(unicode(filename)))


def FindSrcSrvFile(filename, toolchain_dir):
  """Return the absolute path for a file in the srcsrv directory.

  If |toolchain_dir| is null then this will assume that the file is in this
  script's directory.
  """
  bin_dir = os.path.join(toolchain_dir, 'Windows Kits', '10', 'Debuggers',
                         'x64', 'srcsrv')
  if not os.path.exists(bin_dir):
    bin_dir = os.path.join(toolchain_dir, 'win_sdk', 'Debuggers', 'x64',
                           'srcsrv')
  assert(os.path.exists(bin_dir))
  return os.path.abspath(os.path.join(bin_dir, filename))


def RunCommand(*cmd, **kwargs):
  """Runs a command.

  Returns what has been printed to stdout by this command.

  kwargs:
    raise_on_failure: Indicates if an exception should be raised on failure; if
        set to False then the function will return None.
  """
  kwargs.setdefault('stdin', subprocess.PIPE)
  kwargs.setdefault('stdout', subprocess.PIPE)
  kwargs.setdefault('stderr', subprocess.PIPE)
  kwargs.setdefault('universal_newlines', True)
  raise_on_failure = kwargs.pop('raise_on_failure', True)
  proc = subprocess.Popen(cmd, **kwargs)
  ret, err = proc.communicate()
  if proc.returncode != 0:
    if raise_on_failure:
      print('Error: %s' % err)
      raise subprocess.CalledProcessError(proc.returncode, cmd)
    return
  ret = (ret or '').rstrip('\n')
  return ret


def ExtractSourceFiles(pdb_filename, toolchain_dir):
  """Extract a list of local paths of the source files from a PDB."""
  # Don't use |RunCommand| as it expects the return code to be 0 on success,
  # but srctool returns the number of files instead.
  srctool = subprocess.Popen([FindSrcSrvFile('srctool.exe', toolchain_dir),
                              '-r', pdb_filename],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True)
  src_files, _ = srctool.communicate()
  if (not src_files or src_files.startswith("srctool: ") or
      srctool.returncode <= 0):
    raise Exception("srctool failed: " + src_files)
  return set(
      x.rstrip('\n').lower() for x in src_files.split('\n') if len(x) != 0)


def ReadSourceStream(pdb_filename, toolchain_dir):
  """Read the contents of the source information stream from a PDB."""
  pdbstr = subprocess.Popen([FindSrcSrvFile('pdbstr.exe', toolchain_dir),
                             '-r', '-s:srcsrv',
                             '-p:%s' % pdb_filename],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  data, _ = pdbstr.communicate()
  data = data.decode('utf-8')
  # Old versions of pdbstr.exe return -1 when the requested source stream is
  # missing, while more recent ones return 1; use |abs| to work around this.
  if ((pdbstr.returncode != 0 and abs(pdbstr.returncode) != 1) or
      data.startswith("pdbstr: ")):
    raise Exception("pdbstr failed: " + data)
  return data


def WriteSourceStream(pdb_filename, data, toolchain_dir):
  """Write the contents of the source information stream to a PDB."""
  # Write out the data to a temporary filename that we can pass to pdbstr.
  (f, fname) = tempfile.mkstemp()
  f = os.fdopen(f, "wb")
  f.write(data.encode('utf-8'))
  f.close()

  srctool = subprocess.Popen([FindSrcSrvFile('pdbstr.exe', toolchain_dir),
                              '-w', '-s:srcsrv',
                              '-i:%s' % fname,
                              '-p:%s' % pdb_filename],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  data, _ = srctool.communicate()
  data = data.decode('utf-8')

  if ((srctool.returncode != 0 and srctool.returncode != -1) or
      data.startswith("pdbstr: ")):
    raise Exception("pdbstr failed: " + data)
  os.unlink(fname)


def ExtractGitInfo(local_filename):
  """Checks if a file is coming from a git repository and if so returns some
  information about it.

  Args:
    local_filename: The name of the file that we want to check.

  Returns:
    None if the file doesn't come from a git repository, otherwise it returns a
    RevisionInfo tuple.
  """
  # Starts by checking if this file is coming from a git repository. For that
  # we'll start by calling 'git log' on this file; for this to work we need to
  # make sure that the current working directory is correctly cased. It turns
  # out that even on Windows the casing of the path passed in the |cwd|
  # argument of subprocess.Popen matters, and if it's not correctly cased then
  # 'git log' will return None even if the file is coming from a git
  # repository. This is not the case if we're just interested in checking if
  # the path containing |local_filename| is coming from a git repository; in
  # this case the casing doesn't matter.
  local_filename = GetCasedFilePath(local_filename)
  local_file_basename = os.path.basename(local_filename)
  local_file_dir = os.path.dirname(local_filename)
  file_info = RunCommand('git.bat', 'log', '-n', '1', local_file_basename,
                         cwd=local_file_dir, raise_on_failure=False)

  if not file_info:
    return

  # Get the revision of the master branch.
  rev = RunCommand('git.bat', 'rev-parse', 'HEAD', cwd=local_file_dir)

  # Get the url of the remote repository.
  repo = RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
                    cwd=local_file_dir)
  # If the repository points to a local directory then we need to run this
  # command one more time from this directory to get the repository url.
  if os.path.isdir(repo):
    repo = RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
                      cwd=repo)

  # Don't use the authenticated path.
  repo = repo.replace('googlesource.com/a/', 'googlesource.com/')

  # Get the relative file path for this file in the git repository.
  git_path = RunCommand('git.bat', 'ls-tree', '--full-name', '--name-only',
                        'HEAD', local_file_basename,
                        cwd=local_file_dir).replace('/', '\\')
  if not git_path:
    return

  git_root_path = local_filename.replace(git_path, '')

  if repo not in REPO_MAP:
    # Automatically add projects hosted on googlesource.com or GitHub to the
    # repository map.
    if urlparse(repo).netloc.endswith('.googlesource.com'):
      # The files from these repositories are accessible via gitiles in a
      # base64 encoded format.
      REPO_MAP[repo] = {
          'url': '%s/+/{revision}/{file_path}?format=TEXT' % repo,
          'base64': True
      }
    elif urlparse(repo).netloc.endswith('github.com'):
      raw_url = '%s/{revision}/{file_path}' % repo.replace('.git', '').replace(
          'github.com', 'raw.githubusercontent.com')
      REPO_MAP[repo] = {
          'url': raw_url,
          'base64': False
      }

  # Get the list of files coming from this repository.
  git_file_list = RunCommand('git.bat', 'ls-tree', '--full-name',
                             '--name-only', 'HEAD', '-r', cwd=git_root_path)

  file_list = [x for x in git_file_list.splitlines() if len(x) != 0]

  return RevisionInfo(repo=repo, rev=rev, files=file_list,
                      root_path=git_root_path, path_prefix=None)


def CheckForJunction(filename):
  """Check if a directory containing a file is a junction to another directory.

  If so return 3 values:
    - The real path to this file.
    - The root directory of this checkout relative to |filename| (i.e. not
      relative to the real path).
    - The sub directory of the repository that has been checked out.
  """
  # Process the path of |filename| from right to left until a junction has been
  # found.
  #
  # Here's an example of what this does, for this example
  # |filename| = 'C:/a/b/c/d.h' and 'C:/a' is a junction to 'C:/real_a/'.
  #
  # - We remove the filename part (we're only looking at directory junctions
  #   here), which leaves us with 'C:/a/b/c'.
  # - During the first iteration we take 'C:/a/b' as our current root value and
  #   'c' as the leaf value.
  # - We run the 'dir' command on 'C:/a/b' and we look for a junction to 'c'.
  #   As we don't find any we go up one directory. Now the current root is
  #   'C:/a' and the current leaf value is 'b'.
  # - We run the 'dir' command on 'C:/a' and we look for a junction to 'b'.
  #   This time we find one so we return the following triplet:
  #     - C:/real_a/b/c/d.h  # The real file path.
  #     - C:/a               # The root directory containing this junction.
  #     - b                  # The name of the junction.
  cur_root, cur_leaf = os.path.split(os.path.dirname(filename))
  while cur_leaf:
    # Run the 'dir' command and look for a junction.
    dir_cmd = RunCommand('cmd', '/c', 'dir', cur_root)
    for entry in dir_cmd.splitlines():
      m = _DIR_JUNCTION_RE.match(entry)
      if not m:
        continue
      if not m.group('dirname') == cur_leaf:
        continue
      real_path = filename.replace(os.path.join(cur_root, cur_leaf),
                                   m.group('real_path'))
      # This should always be the case.
      # TODO(sebmarchand): Remove this check if it proves to be useless.
      if os.path.exists(real_path):
        return real_path, cur_root, cur_leaf
      else:
        print('Source indexing: error: Unexpected non existing file \'%s\'' %
              real_path)
        return None, None, None
    cur_root, cur_leaf = os.path.split(cur_root)
  return None, None, None


def IndexFilesFromRepo(
    local_filename, file_list, output_lines, follow_junctions):
  """Checks if a given file is part of a Git repository and if so indexes all
  the files from this repository.

  Args:
    local_filename: The filename of the current file.
    file_list: The list of files that should be indexed.
    output_lines: The source indexing lines that will be appended to the PDB.
    follow_junctions: Indicates if we should try to index the files in a
        junction.

  Returns the number of indexed files.
  """
  indexed_files = 0
  patch_root = None

  # Try to extract the revision info for the current file.
  info = ExtractGitInfo(local_filename)

  # If we haven't been able to find information for this file it might be
  # because its path contains a junction to another directory. It can be the
  # case if you do a Git checkout in C:/real_a/ and you add a junction to one
  # of the subdirectories (let's say 'b') of this checkout in another project
  # (e.g. 'C:/a'), so you'll end up with a partial Git checkout in a junction,
  # and any Git command in the path of the junction won't work (or it'll
  # return information related to 'C:/a' instead of 'C:/real_a').
  if not info and follow_junctions:
    real_filename, patch_root, patch_leaf = CheckForJunction(local_filename)
    if real_filename:
      info = ExtractGitInfo(real_filename)

  # Don't try to index the internal sources.
  if not info or ('internal.googlesource.com' in info.repo):
    return 0

  repo = info.repo
  rev = info.rev
  files = info.files
  if patch_root:
    files = [x for x in files if x.startswith(patch_leaf)]
    root_path = patch_root.lower()
  else:
    root_path = info.root_path.lower()

  # Checks if we should index this file and if the source that we'll retrieve
  # will be base64 encoded.
  should_index = False
  base_64 = False
  if repo in REPO_MAP:
    should_index = True
    base_64 = REPO_MAP[repo].get('base64')
  else:
    repo = None

  # Iterates over the files from this repo and indexes them if needed.
  for file_iter in files:
    current_filename = file_iter.lower()
    full_file_path = os.path.normpath(os.path.join(root_path, current_filename))
    # Checks if the file is in the list of files to be indexed.
    if full_file_path in file_list:
      if should_index:
        source_url = ''
        current_file = file_iter
        # Prefix the filename with the prefix for this repository if needed.
        if info.path_prefix:
          current_file = os.path.join(info.path_prefix, current_file)
        source_url = REPO_MAP[repo].get('url').format(revision=rev,
            file_path=os.path.normpath(current_file).replace('\\', '/'))
        output_lines.append('%s*%s*%s*%s*%s' % (full_file_path, current_file,
            rev, source_url, 'base64.b64decode' if base_64 else ''))
        indexed_files += 1
      file_list.remove(full_file_path)

  # The input file should have been removed from the list of files to index.
  if indexed_files and local_filename in file_list:
    print('%s shouldn\'t be in the list of files to index anymore.' %
          local_filename)
    # TODO(sebmarchand): Turn this into an exception once I've confirmed that
    # this doesn't happen on the official builder.
    file_list.remove(local_filename)

  return indexed_files


def DirectoryIsPartOfPublicGitRepository(local_dir):
  # Checks if this directory is from a public Git checkout.
  info = RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
                    cwd=local_dir, raise_on_failure=False)
  if info:
    if 'internal.googlesource.com' in info:
      return False
    return True
  return False


def UpdatePDB(pdb_filename, verbose=True, build_dir=None, toolchain_dir=None,
              follow_junctions=False):
  """Update a pdb file with source information."""
  dir_exclusion_list = { }
  if build_dir:
    # Excluding the build directory allows skipping the generated files, for
    # Chromium this makes the indexing ~10x faster.
    build_dir = (os.path.normpath(build_dir)).lower()
    for directory, _, _ in os.walk(build_dir):
      dir_exclusion_list[directory.lower()] = True
    dir_exclusion_list[build_dir.lower()] = True
  if toolchain_dir:
    # Exclude the directories from the toolchain as we don't have revision info
    # for them.
    toolchain_dir = (os.path.normpath(toolchain_dir)).lower()
    for directory, _, _ in os.walk(toolchain_dir):
      dir_exclusion_list[directory.lower()] = True
    dir_exclusion_list[toolchain_dir.lower()] = True

  # Writes the header of the source index stream.
  #
  # Here's the description of the variables used in the SRC_* macros (those
  # variables have to be defined for every source file that we want to index):
  #   var1: The file path.
  #   var2: The name of the file without its path.
  #   var3: The revision or the hash of this file's repository.
  #   var4: The URL to this file.
  #   var5: (optional) The python method to call to decode this file, e.g. for
  #       a base64 encoded file this value should be 'base64.b64decode'.
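  #
  # Each line of the 'source files' section built below (and appended to by
  # IndexFilesFromRepo) packs these variables separated by '*', i.e.
  # var1*var2*var3*var4*var5. An illustrative, made-up example (wrapped here
  # for readability, the real entry is a single line):
  #   c:\src\base\foo.cc*base/foo.cc*<hash>*
  #       https://<host>/+/<hash>/base/foo.cc?format=TEXT*base64.b64decode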
  lines = [
    'SRCSRV: ini ------------------------------------------------',
    'VERSION=1',
    'INDEXVERSION=2',
    'VERCTRL=Subversion',
    'DATETIME=%s' % time.asctime(),
    'SRCSRV: variables ------------------------------------------',
    'SRC_EXTRACT_TARGET_DIR=%targ%\\%fnbksl%(%var2%)\\%var3%',
    'SRC_EXTRACT_TARGET=%SRC_EXTRACT_TARGET_DIR%\\%fnfile%(%var1%)',
    'SRC_EXTRACT_CMD=cmd /c "mkdir "%SRC_EXTRACT_TARGET_DIR%" & python3 -c '
        '"import urllib.request, base64;'
        'url = \\\"%var4%\\\";'
        'u = urllib.request.urlopen(url);'
        'open(r\\\"%SRC_EXTRACT_TARGET%\\\", \\\"wb\\\").write(%var5%('
        'u.read()))"',
    'SRCSRVTRG=%SRC_EXTRACT_TARGET%',
    'SRCSRVCMD=%SRC_EXTRACT_CMD%',
    'SRCSRV: source files ---------------------------------------',
  ]

  if ReadSourceStream(pdb_filename, toolchain_dir):
    raise Exception("PDB already has source indexing information!")

  filelist = ExtractSourceFiles(pdb_filename, toolchain_dir)
  number_of_files = len(filelist)
  indexed_files_total = 0
  t_init = time.time()
  t1 = t_init
  while filelist:
    filename = next(iter(filelist))
    filedir = os.path.dirname(filename)
    if verbose:
      print("[%d / %d] Processing: %s" % (number_of_files - len(filelist),
                                          number_of_files, filename))
    # Print a message every 60 seconds to make sure that the process doesn't
    # time out.
    if time.time() - t1 > 60:
      t1 = time.time()
      print("Still working, %d / %d files have been processed." %
            (number_of_files - len(filelist), number_of_files))

    # This directory is in the exclusion list, either because it's not part of
    # a repository or because it's from one that we're not interested in
    # indexing.
    if dir_exclusion_list.get(filedir, False):
      if verbose:
        print("  skipping, directory is excluded.")
      filelist.remove(filename)
      continue

    # Skip the files that don't exist on the current machine.
    if not os.path.exists(filename):
      filelist.remove(filename)
      continue

    # Try to index the current file and all the ones coming from the same
    # repository.
    indexed_files = IndexFilesFromRepo(
        filename, filelist, lines, follow_junctions)
    if not indexed_files:
      if not DirectoryIsPartOfPublicGitRepository(filedir):
        dir_exclusion_list[filedir] = True
        if verbose:
          print("Adding %s to the exclusion list." % filedir)
      filelist.remove(filename)
      continue

    indexed_files_total += indexed_files
    if verbose:
      print("  %d files have been indexed." % indexed_files)

  print('Indexing took %d seconds' % (time.time() - t_init))

  lines.append('SRCSRV: end ------------------------------------------------')

  WriteSourceStream(pdb_filename, '\r\n'.join(lines), toolchain_dir)

  if verbose:
    print("%d / %d files have been indexed." % (indexed_files_total,
                                                number_of_files))


def main():
  parser = optparse.OptionParser()
  parser.add_option('-v', '--verbose', action='store_true', default=False)
  parser.add_option('--build-dir', help='The original build directory; if set '
      'all the files present in this directory (or one of its subdirectories) '
      'will be skipped.')
  parser.add_option('--toolchain-dir', help='The directory containing the VS '
      'toolchain that has been used for this build. All the files present in '
      'this directory (or one of its subdirectories) will be skipped.')
  parser.add_option('--follow-junctions', action='store_true', default=False,
      help='Indicates if the junctions should be followed while doing the '
      'indexing.')
  options, args = parser.parse_args()

  if not args:
    parser.error('Specify a pdb.')
  if not options.toolchain_dir:
    parser.error('The toolchain directory should be specified.')

  for pdb in args:
    UpdatePDB(pdb, options.verbose, options.build_dir, options.toolchain_dir,
              options.follow_junctions)

  return 0


if __name__ == '__main__':
  sys.exit(main())