#!/usr/bin/env python3
"""Assemble Mbed TLS change log entries into the change log file.

Add changelog entries to the first level-2 section.
Create a new level-2 section for unreleased changes if needed.
Remove the input files unless --keep-entries is specified.

In each level-3 section, entries are sorted in chronological order
(oldest first). From oldest to newest:
* Merged entry files are sorted according to their merge date (date of
  the merge commit that brought the commit that created the file into
  the target branch).
* Committed but unmerged entry files are sorted according to the date
  of the commit that adds them.
* Uncommitted entry files are sorted according to their modification time.

You must run this program from within a git working directory.
"""

# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
from collections import OrderedDict, namedtuple
import datetime
import functools
import glob
import os
import re
import subprocess
import sys
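
# Typical invocations, for illustration only (the file and directory names
# below are just the defaults declared in main(), not requirements):
#   ./assemble_changelog.py                      # merge ChangeLog.d/*.txt into ChangeLog
#   ./assemble_changelog.py --keep-entries -o ChangeLog.new
#   ./assemble_changelog.py --list-files-only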


class InputFormatError(Exception):
    def __init__(self, filename, line_number, message, *args, **kwargs):
        message = '{}:{}: {}'.format(filename, line_number,
                                     message.format(*args, **kwargs))
        super().__init__(message)


class CategoryParseError(Exception):
    def __init__(self, line_offset, error_message):
        self.line_offset = line_offset
        self.error_message = error_message
        super().__init__('{}: {}'.format(line_offset, error_message))


class LostContent(Exception):
    def __init__(self, filename, line):
        message = ('Lost content from {}: "{}"'.format(filename, line))
        super().__init__(message)


# The category names we use in the changelog.
# If you edit this, update ChangeLog.d/README.md.
STANDARD_CATEGORIES = (
    b'API changes',
    b'Default behavior changes',
    b'Requirement changes',
    b'New deprecations',
    b'Removals',
    b'Features',
    b'Security',
    b'Bugfix',
    b'Changes',
)

# The maximum line length for an entry
MAX_LINE_LENGTH = 80

CategoryContent = namedtuple('CategoryContent', [
    'name', 'title_line', # Title text and line number of the title
    'body', 'body_line', # Body text and starting line number of the body
])
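
# For example (purely illustrative values), parsing a version body that starts
# with b'Security\n   * Fix a hypothetical issue.\n' might produce
# CategoryContent(name=b'Security', title_line=0,
#                 body=b'   * Fix a hypothetical issue.\n', body_line=1).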


class ChangelogFormat:
    """Virtual class documenting how to write a changelog format class."""

    @classmethod
    def extract_top_version(cls, changelog_file_content):
        """Split out the top version section.

        If the top version is already released, create a new top
        version section for an unreleased version.

        Return ``(header, top_version_title, top_version_body, trailer)``
        where the "top version" is the existing top version section if it's
        for unreleased changes, and a newly created section otherwise.
        To assemble the changelog after modifying top_version_body,
        concatenate the four pieces.
        """
        raise NotImplementedError

    @classmethod
    def version_title_text(cls, version_title):
        """Return the text of a formatted version section title."""
        raise NotImplementedError

    @classmethod
    def split_categories(cls, version_body):
        """Split a changelog version section body into categories.

        Return a list of `CategoryContent`; the name is the category title
        without any formatting.
        """
        raise NotImplementedError

    @classmethod
    def format_category(cls, title, body):
        """Construct the text of a category section from its title and body."""
        raise NotImplementedError


class TextChangelogFormat(ChangelogFormat):
    """The traditional Mbed TLS changelog format."""
    _unreleased_version_text = b'= mbed TLS x.x.x branch released xxxx-xx-xx'

    @classmethod
    def is_released_version(cls, title):
        # Look for an incomplete release date
        return not re.search(br'[0-9x]{4}-[0-9x]{2}-[0-9x]?x', title)
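
    # Illustrative examples (not taken from any particular release): a title
    # like b'= mbed TLS 2.99.0 branch released 2099-01-01' counts as released,
    # while the placeholder _unreleased_version_text above (with its
    # xxxx-xx-xx date) does not.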

    _top_version_re = re.compile(br'(?:\A|\n)(=[^\n]*\n+)(.*?\n)(?:=|$)',
                                 re.DOTALL)

    @classmethod
    def extract_top_version(cls, changelog_file_content):
        """A version section starts with a line starting with '='."""
        m = re.search(cls._top_version_re, changelog_file_content)
        top_version_start = m.start(1)
        top_version_end = m.end(2)
        top_version_title = m.group(1)
        top_version_body = m.group(2)
        if cls.is_released_version(top_version_title):
            top_version_end = top_version_start
            top_version_title = cls._unreleased_version_text + b'\n\n'
            top_version_body = b''
        return (changelog_file_content[:top_version_start],
                top_version_title, top_version_body,
                changelog_file_content[top_version_end:])

    @classmethod
    def version_title_text(cls, version_title):
        return re.sub(br'\n.*', b'', version_title, flags=re.DOTALL)

    _category_title_re = re.compile(br'(^\w.*)\n+', re.MULTILINE)
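    # For example (illustrative), in a version body such as
    #   b'Bugfix\n   * Fix something.\n\nFeatures\n   * Add something.\n'
    # the category titles are the lines that start in column 0
    # ('Bugfix' and 'Features').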

    @classmethod
    def split_categories(cls, version_body):
        """A category title is a line with the title in column 0."""
        if not version_body:
            return []
        title_matches = list(re.finditer(cls._category_title_re, version_body))
        if not title_matches or title_matches[0].start() != 0:
            # There is junk before the first category.
            raise CategoryParseError(0, 'Junk found where category expected')
        title_starts = [m.start(1) for m in title_matches]
        body_starts = [m.end(0) for m in title_matches]
        body_ends = title_starts[1:] + [len(version_body)]
        bodies = [version_body[body_start:body_end].rstrip(b'\n') + b'\n'
                  for (body_start, body_end) in zip(body_starts, body_ends)]
        title_lines = [version_body[:pos].count(b'\n') for pos in title_starts]
        body_lines = [version_body[:pos].count(b'\n') for pos in body_starts]
        return [CategoryContent(title_match.group(1), title_line,
                                body, body_line)
                for title_match, title_line, body, body_line
                in zip(title_matches, title_lines, bodies, body_lines)]

    @classmethod
    def format_category(cls, title, body):
        # `split_categories` ensures that each body ends with a newline.
        # Make sure that there is additionally a blank line between categories.
        if not body.endswith(b'\n\n'):
            body += b'\n'
        return title + b'\n' + body


class ChangeLog:
    """An Mbed TLS changelog.

    A changelog file consists of some header text followed by one or
    more version sections. The version sections are in reverse
    chronological order. Each version section consists of a title and a body.

    The body of a version section consists of zero or more category
    subsections. Each category subsection consists of a title and a body.

    A changelog entry file has the same format as the body of a version section.

    A `ChangelogFormat` object defines the concrete syntax of the changelog.
    Entry files must have the same format as the changelog file.
    """
  167. # Only accept dotted version numbers (e.g. "3.1", not "3").
  168. # Refuse ".x" in a version number where x is a letter: this indicates
  169. # a version that is not yet released. Something like "3.1a" is accepted.
  170. _version_number_re = re.compile(br'[0-9]+\.[0-9A-Za-z.]+')
  171. _incomplete_version_number_re = re.compile(br'.*\.[A-Za-z]')
  172. _only_url_re = re.compile(br'^\s*\w+://\S+\s*$')
  173. _has_url_re = re.compile(br'.*://.*')
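    # _only_url_re matches a line that consists solely of a URL; such lines are
    # exempt from the MAX_LINE_LENGTH check below so that long links need not
    # be broken. _has_url_re only detects whether a line contains a URL at all,
    # and is used to tailor the error message for over-long lines.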

    def add_categories_from_text(self, filename, line_offset,
                                 text, allow_unknown_category):
        """Parse a version section or entry file."""
        try:
            categories = self.format.split_categories(text)
        except CategoryParseError as e:
            raise InputFormatError(filename, line_offset + e.line_offset,
                                   e.error_message)
        for category in categories:
            if not allow_unknown_category and \
               category.name not in self.categories:
                raise InputFormatError(filename,
                                       line_offset + category.title_line,
                                       'Unknown category: "{}"',
                                       category.name.decode('utf8'))

            body_split = category.body.splitlines()
            for line_number, line in enumerate(body_split, 1):
                if not self._only_url_re.match(line) and \
                   len(line) > MAX_LINE_LENGTH:
                    long_url_msg = '. URL exceeding length limit must be alone in its line.' \
                        if self._has_url_re.match(line) else ""
                    raise InputFormatError(filename,
                                           category.body_line + line_number,
                                           'Line is longer than allowed: '
                                           'Length {} (Max {}){}',
                                           len(line), MAX_LINE_LENGTH,
                                           long_url_msg)

            self.categories[category.name] += category.body

    def __init__(self, input_stream, changelog_format):
        """Create a changelog object.

        Populate the changelog object from the content of the file
        input_stream.
        """
        self.format = changelog_format
        whole_file = input_stream.read()
        (self.header,
         self.top_version_title, top_version_body,
         self.trailer) = self.format.extract_top_version(whole_file)
        # Split the top version section into categories.
        self.categories = OrderedDict()
        for category in STANDARD_CATEGORIES:
            self.categories[category] = b''
        offset = (self.header + self.top_version_title).count(b'\n') + 1
        self.add_categories_from_text(input_stream.name, offset,
                                      top_version_body, True)

    def add_file(self, input_stream):
        """Add changelog entries from a file.
        """
        self.add_categories_from_text(input_stream.name, 1,
                                      input_stream.read(), False)

    def write(self, filename):
        """Write the changelog to the specified file.
        """
        with open(filename, 'wb') as out:
            out.write(self.header)
            out.write(self.top_version_title)
            for title, body in self.categories.items():
                if not body:
                    continue
                out.write(self.format.format_category(title, body))
            out.write(self.trailer)


@functools.total_ordering
class EntryFileSortKey:
    """This class defines an ordering on changelog entry files: older < newer.

    * Merged entry files are sorted according to their merge date (date of
      the merge commit that brought the commit that created the file into
      the target branch).
    * Committed but unmerged entry files are sorted according to the date
      of the commit that adds them.
    * Uncommitted entry files are sorted according to their modification time.

    This class assumes that the file is in a git working directory with
    the target branch checked out.
    """

    # Categories of files. A lower number is considered older.
    MERGED = 0
    COMMITTED = 1
    LOCAL = 2
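
    # Note (derived from sort_key below): the category dominates the ordering,
    # so every MERGED entry sorts before every COMMITTED entry, which in turn
    # sorts before every LOCAL (uncommitted) entry, regardless of timestamps.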

    @staticmethod
    def creation_hash(filename):
        """Return the git commit id at which the given file was created.

        Return None if the file was never checked into git.
        """
        hashes = subprocess.check_output(['git', 'log', '--format=%H',
                                          '--follow',
                                          '--', filename])
        m = re.search(b'(.+)$', hashes)
        if not m:
            # The git output is empty. This means that the file was
            # never checked in.
            return None
        # The last commit in the log is the oldest one, which is when the
        # file was created.
        return m.group(0)

    @staticmethod
    def list_merges(some_hash, target, *options):
        """List merge commits from some_hash to target.

        Pass options to git to select which commits are included.
        """
        text = subprocess.check_output(['git', 'rev-list',
                                        '--merges', *options,
                                        b'..'.join([some_hash, target])])
        return text.rstrip(b'\n').split(b'\n')

    @classmethod
    def merge_hash(cls, some_hash):
        """Return the git commit id at which the given commit was merged.

        Return None if the given commit was never merged.
        """
        target = b'HEAD'
        # List the merges from some_hash to the target in two ways.
        # The ancestry list is the ones that are both descendants of
        # some_hash and ancestors of the target.
        ancestry = frozenset(cls.list_merges(some_hash, target,
                                             '--ancestry-path'))
        # The first_parents list only contains merges that are directly
        # on the target branch. We want it in reverse order (oldest first).
        first_parents = cls.list_merges(some_hash, target,
                                        '--first-parent', '--reverse')
        # Look for the oldest merge commit that's both on the direct path
        # and directly on the target branch. That's the place where some_hash
        # was merged on the target branch. See
        # https://stackoverflow.com/questions/8475448/find-merge-commit-which-include-a-specific-commit
        for commit in first_parents:
            if commit in ancestry:
                return commit
        return None
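
    # For example (illustrative): if a commit C was made on a feature branch
    # and merge commit M later brought that branch into the target branch,
    # then M is both on the ancestry path from C and on the target branch's
    # first-parent chain, so merge_hash(C) returns M.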

    @staticmethod
    def commit_timestamp(commit_id):
        """Return the timestamp of the given commit."""
        text = subprocess.check_output(['git', 'show', '-s',
                                        '--format=%ct',
                                        commit_id])
        return datetime.datetime.utcfromtimestamp(int(text))

    @staticmethod
    def file_timestamp(filename):
        """Return the modification timestamp of the given file."""
        mtime = os.stat(filename).st_mtime
        return datetime.datetime.fromtimestamp(mtime)

    def __init__(self, filename):
        """Determine position of the file in the changelog entry order.

        This constructor returns an object that can be used with comparison
        operators, with `sort` and `sorted`, etc. Older entries are sorted
        before newer entries.
        """
        self.filename = filename
        creation_hash = self.creation_hash(filename)
        if not creation_hash:
            self.category = self.LOCAL
            self.datetime = self.file_timestamp(filename)
            return
        merge_hash = self.merge_hash(creation_hash)
        if not merge_hash:
            self.category = self.COMMITTED
            self.datetime = self.commit_timestamp(creation_hash)
            return
        self.category = self.MERGED
        self.datetime = self.commit_timestamp(merge_hash)

    def sort_key(self):
        """Return a concrete sort key for this entry file sort key object.

        ``ts1 < ts2`` is implemented as ``ts1.sort_key() < ts2.sort_key()``.
        """
        return (self.category, self.datetime, self.filename)
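
    # For example (purely illustrative), a merged entry file might have the
    # sort key (MERGED, <merge commit date>, 'ChangeLog.d/fix-something.txt');
    # 'fix-something.txt' is a made-up name used only for this comment.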

    def __eq__(self, other):
        return self.sort_key() == other.sort_key()

    def __lt__(self, other):
        return self.sort_key() < other.sort_key()


def check_output(generated_output_file, main_input_file, merged_files):
    """Make sanity checks on the generated output.

    The intent of these sanity checks is to have reasonable confidence
    that no content has been lost.

    The sanity check is that every line that is present in an input file
    is also present in an output file. This is not perfect but good enough
    for now.
    """
    generated_output = set(open(generated_output_file, 'rb'))
    for line in open(main_input_file, 'rb'):
        if line not in generated_output:
            raise LostContent('original file', line)
    for merged_file in merged_files:
        for line in open(merged_file, 'rb'):
            if line not in generated_output:
                raise LostContent(merged_file, line)


def finish_output(changelog, output_file, input_file, merged_files):
    """Write the changelog to the output file.

    The input file and the list of merged files are used only for sanity
    checks on the output.
    """
    if os.path.exists(output_file) and not os.path.isfile(output_file):
        # The output is a non-regular file (e.g. pipe). Write to it directly.
        output_temp = output_file
    else:
        # The output is a regular file. Write to a temporary file,
        # then move it into place atomically.
        output_temp = output_file + '.tmp'
    changelog.write(output_temp)
    check_output(output_temp, input_file, merged_files)
    if output_temp != output_file:
        os.rename(output_temp, output_file)


def remove_merged_entries(files_to_remove):
    for filename in files_to_remove:
        os.remove(filename)


def list_files_to_merge(options):
    """List the entry files to merge, oldest first.

    "Oldest" is defined by `EntryFileSortKey`.
    """
    files_to_merge = glob.glob(os.path.join(options.dir, '*.txt'))
    files_to_merge.sort(key=EntryFileSortKey)
    return files_to_merge


def merge_entries(options):
    """Merge changelog entries into the changelog file.

    Read the changelog file from options.input.
    Read entries to merge from the directory options.dir.
    Write the new changelog to options.output.
    Remove the merged entries if options.keep_entries is false.
    """
    with open(options.input, 'rb') as input_file:
        changelog = ChangeLog(input_file, TextChangelogFormat)
    files_to_merge = list_files_to_merge(options)
    if not files_to_merge:
        sys.stderr.write('There are no pending changelog entries.\n')
        return
    for filename in files_to_merge:
        with open(filename, 'rb') as input_file:
            changelog.add_file(input_file)
    finish_output(changelog, options.output, options.input, files_to_merge)
    if not options.keep_entries:
        remove_merged_entries(files_to_merge)


def show_file_timestamps(options):
    """List the files to merge and their timestamp.

    This is only intended for debugging purposes.
    """
    files = list_files_to_merge(options)
    for filename in files:
        ts = EntryFileSortKey(filename)
        print(ts.category, ts.datetime, filename)


def set_defaults(options):
    """Add default values for missing options."""
    output_file = getattr(options, 'output', None)
    if output_file is None:
        options.output = options.input
    if getattr(options, 'keep_entries', None) is None:
        options.keep_entries = (output_file is not None)
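
# Illustrative summary of the resulting defaults (derived from set_defaults
# and the argparse declarations in main): with no options at all, the output
# is the input file and merged entry files are removed; if --output/-o is
# given, entry files are kept unless --no-keep-entries is passed explicitly.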


def main():
    """Command line entry point."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--dir', '-d', metavar='DIR',
                        default='ChangeLog.d',
                        help='Directory to read entries from'
                             ' (default: ChangeLog.d)')
    parser.add_argument('--input', '-i', metavar='FILE',
                        default='ChangeLog',
                        help='Existing changelog file to read from and augment'
                             ' (default: ChangeLog)')
    parser.add_argument('--keep-entries',
                        action='store_true', dest='keep_entries', default=None,
                        help='Keep the files containing entries'
                             ' (default: remove them if --output/-o is not specified)')
    parser.add_argument('--no-keep-entries',
                        action='store_false', dest='keep_entries',
                        help='Remove the files containing entries after they are merged'
                             ' (default: remove them if --output/-o is not specified)')
    parser.add_argument('--output', '-o', metavar='FILE',
                        help='Output changelog file'
                             ' (default: overwrite the input)')
    parser.add_argument('--list-files-only',
                        action='store_true',
                        help=('Only list the files that would be processed '
                              '(with some debugging information)'))
    options = parser.parse_args()
    set_defaults(options)
    if options.list_files_only:
        show_file_timestamps(options)
        return
    merge_entries(options)


if __name__ == '__main__':
    main()