gen-lockedsig-cache

#!/usr/bin/env python3
#
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import os
import sys
import shutil
import errno
import time
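
# helper: create a directory tree, tolerating a directory that already exists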
def mkdir(d):
    try:
        os.makedirs(d)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise e

# extract the hash from past the last colon to the first underscore
def extract_sha(filename):
    return filename.split(':')[7].split('_')[0]

# get all files in a directory, extract hash and make
# a map from hash to list of files with that hash
def map_sha_to_files(dir_, prefix, sha_map):
    sstate_prefix_path = dir_ + '/' + prefix + '/'
    if not os.path.exists(sstate_prefix_path):
        return
    sstate_files = os.listdir(sstate_prefix_path)
    for f in sstate_files:
        try:
            sha = extract_sha(f)
            if sha not in sha_map:
                sha_map[sha] = []
            sha_map[sha].append(sstate_prefix_path + f)
        except IndexError:
            continue

# given a prefix build a map of hash to list of files
def build_sha_cache(prefix):
    sha_map = {}

    sstate_dir = sys.argv[2]
    map_sha_to_files(sstate_dir, prefix, sha_map)

    native_sstate_dir = sys.argv[2] + '/' + sys.argv[4]
    map_sha_to_files(native_sstate_dir, prefix, sha_map)

    return sha_map

if len(sys.argv) < 5:
    print("Incorrect number of arguments specified")
    print("syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir> <nativelsbstring> [filterfile]")
    sys.exit(1)
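
# arguments:
#   sys.argv[1] - locked-sigs.inc file listing the locked task signatures
#   sys.argv[2] - input sstate cache directory
#   sys.argv[3] - output directory for the pruned cache
#   sys.argv[4] - nativelsbstring subdirectory of the input cache that is also searched
#   sys.argv[5] - optional filter file restricting which tasks are included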

filterlist = []
if len(sys.argv) > 5:
    print('Reading filter file %s' % sys.argv[5])
    with open(sys.argv[5]) as f:
        for l in f.readlines():
            if ":" in l:
                filterlist.append(l.rstrip())
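
# parse the locked signatures: each entry has the form <task>:<hash>, where <task>
# itself may contain colons (e.g. recipe:do_task), so split on the last colon only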
print('Reading %s' % sys.argv[1])
sigs = []
with open(sys.argv[1]) as f:
    for l in f.readlines():
        if ":" in l:
            task, sig = l.split()[0].rsplit(':', 1)
            if filterlist and not task in filterlist:
                print('Filtering out %s' % task)
            else:
                sigs.append(sig)
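
# sstate files are sharded into <first two>/<next two> hash-character subdirectories,
# so only the subdirectories that can contain each signature need to be scanned;
# the directory listings are cached per prefix pair to avoid rescanning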
print('Gathering file list')
start_time = time.perf_counter()
files = set()
sstate_content_cache = {}
for s in sigs:
    prefix = s[:2]
    prefix2 = s[2:4]
    if prefix not in sstate_content_cache:
        sstate_content_cache[prefix] = {}
    if prefix2 not in sstate_content_cache[prefix]:
        sstate_content_cache[prefix][prefix2] = build_sha_cache(prefix + "/" + prefix2)

    if s in sstate_content_cache[prefix][prefix2]:
        for f in sstate_content_cache[prefix][prefix2][s]:
            files.add(f)

elapsed = time.perf_counter() - start_time
print("Gathering file list took %.1fs" % elapsed)
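
# copy the selected sstate archives and signature files into the output cache,
# hard linking instead of copying when source and destination are on the same device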
print('Processing files')
for f in files:
    sys.stdout.write('Processing %s... ' % f)
    if not f.endswith(('.tar.zst', '.siginfo', '.sig')):
        # Most likely a temp file, skip it
        print('skipping')
        continue
    dst = os.path.join(sys.argv[3], os.path.relpath(f, sys.argv[2]))
    destdir = os.path.dirname(dst)
    mkdir(destdir)

    src = os.path.realpath(f)
    if os.path.exists(dst):
        os.remove(dst)
    if (os.stat(src).st_dev == os.stat(destdir).st_dev):
        print('linking')
        try:
            os.link(src, dst)
        except OSError as e:
            print('hard linking failed, copying')
            shutil.copyfile(src, dst)
    else:
        print('copying')
        shutil.copyfile(src, dst)

print('Done!')