generate_javascript_parser_proto.py

#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Script for generating .proto and a conversion .cc file for a templated library
based JavaScript parser fuzzer.
"""
import sys
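
# The script reads a token dictionary, writes a .proto file whose Token enum
# has one value per dictionary word, and writes a .cc file defining
# token_to_string(), which converts a Token protobuf back into JavaScript
# source text for the fuzzer.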

def ParseWord(word_string):
  # Every part of the word is either a string surrounded by "" or a placeholder
  # $<int>.
  word_string = word_string.lstrip().rstrip()

  parts = []
  while len(word_string) > 0:
    if word_string[0] == '"':
      end_ix = 1 + word_string[1:].index('"')
      parts.append(word_string[1:end_ix])
      word_string = word_string[(end_ix + 1):]
    elif word_string[0] == '$':
      if ' ' in word_string:
        end_ix = word_string.index(' ')
      else:
        end_ix = len(word_string)
      parts.append(int(word_string[1:end_ix]))
      word_string = word_string[end_ix:]
    else:
      assert(False)
    word_string = word_string.lstrip()
  return parts
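
# For illustration (a hypothetical dictionary line, not taken from the real
# dictionary file): ParseWord('"var" $0 "=" $1 ";"') returns
# ['var', 0, '=', 1, ';'], where each integer is an index into the token's
# inner_tokens.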

def GenerateProtoContents(words):
  contents = ''
  for ix in range(len(words)):
    contents += '    token_value_' + str(ix) + ' = ' + str(ix) + ';\n'
  return contents
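
# For a dictionary of three words, for example, this produces the enum body
#   token_value_0 = 0;
#   token_value_1 = 1;
#   token_value_2 = 2;
# which main() wraps with the Token message and Value enum declarations.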

def GenerateConversionContents(words):
  contents = ''
  ix = 0
  for word in words:
    contents += '    case ' + str(ix) + ':\n'
    max_part = -1
    first = True
    building_string = ''
    for part in word:
      if not first:
        building_string += ' + std::string(" ") + '
      if isinstance(part, str):
        building_string += 'std::string("' + part + '")'
      else:
        if (part > max_part):
          max_part = part
        building_string += ('token_to_string(token.inner_tokens(' + str(part) +
                            '), depth)')
      first = False
    if max_part >= 0:
      contents += ('      if (token.inner_tokens().size() < ' +
                   str(max_part + 1) + ') return std::string("");\n')
    contents += '      return ' + building_string + ';\n'
    ix += 1
  return contents
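
# Continuing the hypothetical '"var" $0 "=" $1 ";"' example above, the case
# generated for that word looks roughly like this (a single generated line,
# wrapped here for readability):
#     case <ix>:
#       if (token.inner_tokens().size() < 2) return std::string("");
#       return std::string("var") + std::string(" ") +
#              token_to_string(token.inner_tokens(0), depth) + ...;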

def ReadDictionary(filename):
  with open(filename) as input_file:
    lines = input_file.readlines()

  words = []
  for line in lines:
    if not line.startswith('#'):
      word = ParseWord(line)
      if len(word) > 0:
        words.append(word)
  return words
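
# The dictionary is a plain text file with one word definition per line; lines
# starting with '#' are treated as comments, and blank lines are skipped
# because ParseWord returns an empty list for them.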

def main(argv):
  output_proto_file = argv[1]
  output_cc_file = argv[2]
  input_dict_file = argv[3]

  words = ReadDictionary(input_dict_file)

  proto_header = ('// Generated by generate_javascript_parser_proto.py.\n'
                  '\n'
                  'syntax = "proto2";\n'
                  'package javascript_parser_proto_fuzzer;\n'
                  '\n'
                  'message Token {\n'
                  '  enum Value {\n')

  proto_footer = ('  }\n'
                  '  required Value value = 1;\n'
                  '  repeated Token inner_tokens = 2;\n'
                  '}\n'
                  '\n'
                  'message Source {\n'
                  '  required bool is_module = 1;\n'
                  '  repeated Token tokens = 2;\n'
                  '}\n')

  proto_contents = proto_header + GenerateProtoContents(words) + proto_footer

  with open(output_proto_file, 'w') as f:
    f.write(proto_contents)

  conversion_header = (
      '// Generated by generate_javascript_parser_proto.py.\n'
      '\n'
      '#include "testing/libfuzzer/fuzzers/'
      'javascript_parser_proto_to_string.h"\n'
      '\n'
      '// Bound calls to token_to_string to prevent memory usage from growing\n'
      '// too much.\n'
      'const int kMaxRecursiveDepth = 9;\n'
      '\n'
      'std::string token_to_string(\n'
      '    const javascript_parser_proto_fuzzer::Token& token, int depth)'
      ' {\n'
      '  if (++depth == kMaxRecursiveDepth) return std::string("");\n'
      '  switch(token.value()) {\n')

  conversion_footer = ('    default: break;\n'
                       '  }\n'
                       '  return std::string("");\n'
                       '}\n')

  conversion_contents = (conversion_header + GenerateConversionContents(words)
                         + conversion_footer)

  with open(output_cc_file, 'w') as f:
    f.write(conversion_contents)


if __name__ == "__main__":
  main(sys.argv)
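
# Invocation (positional arguments, as read by main() above):
#   generate_javascript_parser_proto.py <output .proto> <output .cc> <input dictionary file>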