gen-build.py

#!/usr/bin/env python
#
# USAGE: gen-build.py TYPE
#
# where TYPE is one of: make, dsp, vcproj
#
# It reads build.conf from the current directory, and produces its output
# into the current directory.
#
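# An illustrative build.conf (the [options] section and its 'headers',
# 'paths', and 'platform_dirs' keys are the ones read below; the values
# shown here are placeholders, not the project's actual settings):
#
#   [options]
#   headers = include/*.h
#   paths = strings/*.c tables/*.c
#   platform_dirs = file_io locks network_io
#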
import os
import ConfigParser
import getopt
import string
import glob
import re
#import ezt

#
# legal platforms: aix, beos, netware, os2, os390, unix, win32
# 'make' users: aix, beos, os2, os390, unix
#
PLATFORMS = [ 'aix', 'beos', 'netware', 'os2', 'os390', 'unix', 'win32' ]
MAKE_PLATFORMS = [
  ('unix', None),
  ('aix', 'unix'),
  ('beos', 'unix'),
  ('os2', 'unix'),
  ('os390', 'unix'),
  ]

# note: MAKE_PLATFORMS is an ordered set. we want to generate unix symbols
# first, so that the later platforms can reference them.

def main():
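  "Read build.conf and emit build-outputs.mk into the current directory."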
  parser = ConfigParser.ConfigParser()
  parser.read('build.conf')

  headers = get_files(parser.get('options', 'headers'))

  # compute the relevant headers, along with the implied includes
  legal_deps = { }
  for fname in headers:
    legal_deps[os.path.basename(fname)] = fname

  h_deps = { }
  for fname in headers:
    h_deps[os.path.basename(fname)] = extract_deps(fname, legal_deps)
  resolve_deps(h_deps)

  f = open('build-outputs.mk', 'w')
  f.write('# DO NOT EDIT. AUTOMATICALLY GENERATED.\n\n')

  # write out the platform-independent files
  files = get_files(parser.get('options', 'paths'))
  objects, dirs = write_objects(f, legal_deps, h_deps, files)
  f.write('\nOBJECTS_all = %s\n\n' % string.join(objects))

  # for each platform and each subdirectory holding platform-specific files,
  # write out their compilation rules, and an OBJECTS_<subdir>_<plat> symbol.
  for platform, parent in MAKE_PLATFORMS:
    # record the object symbols to build for each platform
    group = [ '$(OBJECTS_all)' ]

    for subdir in string.split(parser.get('options', 'platform_dirs')):
      path = '%s/%s' % (subdir, platform)
      if not os.path.exists(path):
        # this subdir doesn't have a subdir for this platform, so we'll
        # use the parent-platform's set of symbols
        if parent:
          group.append('$(OBJECTS_%s_%s)' % (subdir, parent))
        continue

      # remember that this directory has files/objects
      dirs[path] = None

      # write out the compilation lines for this subdir
      files = get_files(path + '/*.c')
      objects, _unused = write_objects(f, legal_deps, h_deps, files)

      symname = 'OBJECTS_%s_%s' % (subdir, platform)

      # and write the symbol for the whole group
      f.write('\n%s = %s\n\n' % (symname, string.join(objects)))

      # and include that symbol in the group
      group.append('$(%s)' % symname)

    # write out a symbol which contains the necessary files
    f.write('OBJECTS_%s = %s\n\n' % (platform, string.join(group)))

  f.write('HEADERS = $(top_srcdir)/%s\n\n' % string.join(headers, ' $(top_srcdir)/'))
  f.write('SOURCE_DIRS = %s $(EXTRA_SOURCE_DIRS)\n\n' % string.join(dirs.keys()))

  # Build a list of all necessary directories in build tree
  alldirs = { }
  for dir in dirs.keys():
    d = dir
    while d:
      alldirs[d] = None
      d = os.path.dirname(d)

  # Sort so 'foo' is before 'foo/bar'
  keys = alldirs.keys()
  keys.sort()
  f.write('BUILD_DIRS = %s\n\n' % string.join(keys))

  f.write('.make.dirs: $(srcdir)/build-outputs.mk\n' \
          '\t@for d in $(BUILD_DIRS); do test -d $$d || mkdir $$d; done\n' \
          '\t@echo timestamp > $@\n')

def write_objects(f, legal_deps, h_deps, files):
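  "Write a dependency line for each .c file; return the .lo objects and their dirs."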
  dirs = { }
  objects = [ ]

  for file in files:
    assert file[-2:] == '.c'
    obj = file[:-2] + '.lo'
    objects.append(obj)

    dirs[os.path.dirname(file)] = None

    # what headers does this file include, along with the implied headers
    deps = extract_deps(file, legal_deps)
    for hdr in deps.keys():
      deps.update(h_deps.get(hdr, {}))

    f.write('%s: %s .make.dirs %s\n' % (obj, file, string.join(deps.values())))

  return objects, dirs

def extract_deps(fname, legal_deps):
  "Extract the headers this file includes."
  deps = { }
  for line in open(fname).readlines():
    if line[:8] != '#include':
      continue
    inc = _re_include.match(line).group(1)
    if inc in legal_deps.keys():
      deps[inc] = legal_deps[inc]
  return deps

_re_include = re.compile('#include *["<](.*)[">]')

def resolve_deps(header_deps):
  "Alter the provided dictionary to flatten includes-of-includes."
  altered = 1
  while altered:
    altered = 0
    for hdr, deps in header_deps.items():
      # print hdr, deps
      start = len(deps)
      for dep in deps.keys():
        deps.update(header_deps.get(dep, {}))
      if len(deps) != start:
        altered = 1

def get_files(patterns):
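  "Expand whitespace-separated glob patterns into a flat list of filenames."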
  files = [ ]
  for pat in string.split(patterns):
    files.extend(glob.glob(pat))
  return files

if __name__ == '__main__':
  main()