Belle II Software  release-08-01-10
process_dir.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-


import os
import stat
from SCons.Script import Environment, File, Flatten, Glob, SConscript


def define_aliases(
    env,
    target,
    dir_name,
    extension=None,
):
    """Define aliases for a build target: one alias for each parent directory
    of dir_name and, if an extension is given, additional '<dir>.<extension>'
    and '<extension>' aliases."""

    parent_dir = dir_name
    while len(parent_dir) > 0:
        env.Alias(parent_dir, target)
        if extension:
            env.Alias(parent_dir + '.' + extension, target)

        next_parent_dir = os.path.split(parent_dir)[0]
        # check that the split actually removed a path component,
        # otherwise we would loop forever
        if next_parent_dir == parent_dir:
            break
        parent_dir = next_parent_dir

    if extension:
        env.Alias(extension, target)

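# Worked example (hypothetical directory names, derived from the function above):
# calling define_aliases(env, target, 'analysis/modules/MyModule', 'modules')
# is expected to register the aliases
#     analysis/modules/MyModule, analysis/modules/MyModule.modules,
#     analysis/modules, analysis/modules.modules,
#     analysis, analysis.modules, modules
# so that e.g. `scons analysis.modules` rebuilds all module libraries below the
# analysis package.
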
def get_files(path_name):
    """Return the files (excluding directories) matching the given glob
    pattern as SCons nodes."""
    result = Glob(path_name)
    files = [f for f in result if not os.path.isdir(str(f))]
    return files


def get_python_files_recursive(topdir_path):
    """Return SCons File nodes for all .py files found recursively below
    topdir_path."""
    python_file_nodes = []

    for (dir_path, dir_names, file_names) in os.walk(topdir_path):
        for file_name in file_names:
            if file_name.endswith('.py'):
                file_path = os.path.join(dir_path, file_name)
                file_node = File(file_path)
                python_file_nodes.append(file_node)

    return python_file_nodes


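# process_dir expects the standard basf2 directory layout: headers in the
# directory itself or in include/, C/C++/Fortran sources in the directory or in
# src/, command line tools in tools/, unit tests in tests/, python scripts in
# scripts/, data files in data/ and module code in modules/; any other
# subdirectory without a dot in its name is processed recursively.
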
def process_dir(
    parent_env,
    dir_name,
    is_module_dir
):
    """Define the build targets (libraries, modules, tools, tests, installed
    scripts and data files) for the given directory and recurse into its
    subdirectories."""

    # remove leading ./
    if dir_name.startswith('./'):
        dir_name = dir_name[2:]

    # determine library name
    if dir_name == '.':
        lib_name = parent_env['PACKAGE']
    else:
        lib_name = dir_name.replace(os.sep, '_')

    # get list of header and linkdef files
    header_files = get_files(os.path.join(dir_name, '*.h'))
    if dir_name != '.':
        header_files += get_files(os.path.join(dir_name, 'include', '*.h'))
    linkdef_files = []
    # iterate over a copy because linkdef headers are removed from header_files
    for header_file in list(header_files):
        if str(header_file).lower().endswith('linkdef.h'):
            linkdef_files.append(header_file)
            header_files.remove(header_file)

    # get list of source files
    c_sources = get_files(os.path.join(dir_name, '*.cc')) \
        + get_files(os.path.join(dir_name, 'src', '*.cc')) \
        + get_files(os.path.join(dir_name, '*.c')) \
        + get_files(os.path.join(dir_name, 'src', '*.c'))
    fortran_sources = get_files(os.path.join(dir_name, '*.f')) \
        + get_files(os.path.join(dir_name, 'src', '*.f')) \
        + get_files(os.path.join(dir_name, '*.F')) \
        + get_files(os.path.join(dir_name, 'src', '*.F')) \
        + get_files(os.path.join(dir_name, '*.f90')) \
        + get_files(os.path.join(dir_name, 'src', '*.f90')) \
        + get_files(os.path.join(dir_name, '*.F90')) \
        + get_files(os.path.join(dir_name, 'src', '*.F90'))
    src_nodes = c_sources + fortran_sources
    src_files = [os.path.join(parent_env['BUILDDIR'], str(node))
                 for node in src_nodes]

    # get list of test files
    test_files = [os.path.join(parent_env['BUILDDIR'], str(node))
                  for node in get_files(os.path.join(dir_name, 'tests', '*.cc'))]

    # get list of script files
    script_files = get_python_files_recursive(os.path.join(dir_name, 'scripts'))

    # get list of executable script files
    executable_files = []
    executable_mode = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    for tools_file in get_files(os.path.join(dir_name, 'tools', '*')):
        executable_file = str(tools_file)
        if os.stat(executable_file).st_mode & executable_mode == executable_mode:
            executable_files.append(tools_file)

    # get list of data files
    data_files = get_files(os.path.join(dir_name, 'data', '*'))

    # create environment for directory
    env = parent_env.Clone()
    env['HEADER_FILES'] = header_files
    env['LINKDEF_FILES'] = linkdef_files
    env['SRC_FILES'] = src_files
    env['TEST_FILES'] = test_files
    env['TEST_LIBS'] = []
    env['SCRIPT_FILES'] = script_files
    env['EXECUTABLE_FILES'] = executable_files
    env['DATA_FILES'] = data_files
    env['DATAOBJECT_LIB'] = []
    env['DATAOBJECT_LIBS'] = []

    # clean up some environment variables that should not be inherited from the parent environment
    if 'SUBLIB' in env.Dictionary():
        del env.Dictionary()['SUBLIB']
    if 'PYTHON_MODULE' in env.Dictionary():
        del env.Dictionary()['PYTHON_MODULE']
    if 'LIBS' in env.Dictionary():
        del env.Dictionary()['LIBS']

    if dir_name in env.get('DISABLE_COMPILER_WARNINGS', []):
        env.AppendUnique(CXXFLAGS=['-w'], CCFLAGS=['-w'], FORTRANFLAGS=['-w'], LINKFLAGS=['-w'])

    # link dataobjects to analysis modules
    if dir_name == '.':
        env.Append(LIBS=['dataobjects'])

    # include SConscript file if it exists
    sconscript_name = os.path.join(dir_name, 'SConscript')
    if os.path.isfile(sconscript_name):
        result = SConscript(sconscript_name, exports='env')

        # use the new environment if it was updated by the SConscript file
        if isinstance(result, Environment):
            env = result

        # don't continue with the default build process if the SConscript file requests this
        if not env.Dictionary().get('CONTINUE', True):
            return

    # Add additional sources.
    if 'ADDITIONAL_SOURCES' in env.Dictionary():
        additional_src_nodes = []
        for source in env.Dictionary()['ADDITIONAL_SOURCES']:
            additional_src_nodes += get_files(os.path.join(dir_name, source))
        additional_src_files = [
            os.path.join(parent_env['BUILDDIR'], str(node))
            for node in additional_src_nodes]
        if len(additional_src_files) > 0:
            src_files.append(additional_src_files)
            env['SRC_FILES'] = src_files

    # Create Fortran module directory.
    if len(fortran_sources) > 0:
        fortran_module_dir = env.Dir('$FORTRANMODDIR').get_abspath()
        if not os.path.isdir(fortran_module_dir):
            os.makedirs(fortran_module_dir)

    # install header files in the include directory
    includes = env.Install(os.path.join(env['INCDIR'], dir_name),
                           env['HEADER_FILES'])
    define_aliases(env, includes, dir_name, 'include')

    # install script files in the library directory
    script_targets = []
    for script_file_node in env['SCRIPT_FILES']:
        script_file_path = str(script_file_node)
        script_dir = os.path.dirname(script_file_path)

        destination_reldir = os.path.relpath(script_dir,
                                             os.path.join(dir_name, 'scripts'))

        if not destination_reldir:
            continue
        destination_dir = os.path.join(env['LIBDIR'], destination_reldir)
        script_target = env.Install(destination_dir, script_file_node)
        script_targets.append(script_target)

    define_aliases(env, script_targets, dir_name, 'scripts')

    # install executable script files in the bin directory
    executables = env.Install(env['BINDIR'], env['EXECUTABLE_FILES'])
    define_aliases(env, executables, dir_name, 'tools')

    # install data files in the data directory
    data = env.Install(os.path.join(env['DATADIR'], dir_name), env['DATA_FILES'])
    define_aliases(env, data, dir_name, 'data')

    # remember tests defined in this directory
    local_test_files = env['TEST_FILES']

    # loop over subdirs
    entries = os.listdir(dir_name)
    exclude_dirs = set()
    excluded_directories_file = os.path.join(dir_name, '.excluded_directories')
    if os.path.exists(excluded_directories_file):
        # read the excluded directory names, one per line
        with open(excluded_directories_file, 'r') as f:
            for line in f:
                exclude_dirs.add(line.rstrip('\n'))
        print(f'Excluded directories: {exclude_dirs}')
    for entry in entries:
        if entry in exclude_dirs:
            continue
        if entry.find('.') == -1 \
                and not os.path.isfile(os.path.join(dir_name, entry)) \
                and entry not in [
                    'include',
                    'src',
                    'tools',
                    'tests',
                    'scripts',
                    'data',
                    'doc',
                    'examples',
                    'modules',
                ]:
            if dir_name == '.' and entry in [
                'build',
                'include',
                'lib',
                'bin',
                'modules',
                'data',
                'site_scons',
            ]:
                continue
            process_dir(env, os.path.join(dir_name, entry), is_module_dir and dir_name != '.')

    # determine whether we are in a special directory
    is_package_dir = dir_name == env['PACKAGE']
    is_sublib_dir = env.Dictionary().get('SUBLIB', False)
    is_python_module_dir = env.Dictionary().get('PYTHON_MODULE', False)
    is_dataobjects_dir = os.path.basename(dir_name) == 'dataobjects' \
        and env['PACKAGE'] != 'framework'
    if dir_name == 'dataobjects':
        is_dataobjects_dir = True
        is_module_dir = False
        lib_name = parent_env['PACKAGE']

    # check whether we have to create a new library
    if is_package_dir or is_sublib_dir or is_python_module_dir \
            or is_module_dir or is_dataobjects_dir:

        # generate dictionaries
        dict_files = []
        aux_dict_targets = []
        check_files = []
        for linkdef_file in env['LINKDEF_FILES']:
            # set the name of library generated at this stage
            # will be read by the RootDict builder
            dict_filename = str(linkdef_file).replace(os.sep, '_')[:-9] + 'Dict.cc'
            dict_file, rootmap_file, rootpcm_file = env.RootDict(os.path.join(env['BUILDDIR'], dict_filename), linkdef_file,
                                                                 ROOTCLING_ROOTMAP_LIB=lib_name)
            # add the extra cxxflags
            dict_ccflags = env["CCFLAGS"] + env["ROOTCLING_EXTRA_CCFLAGS"]
            # add current directory to include path for dictionary compilation
            dict_files.append(env.SharedObject(dict_file, CPPPATH=['.'] + env['CPPPATH'], CCFLAGS=dict_ccflags))

            # install corresponding pcm file in lib (for cling)
            aux_dict_targets.append(env.Copy(os.path.join(env['LIBDIR'], rootpcm_file.name), rootpcm_file))
            # install corresponding rootmap files to support auto-loading of libraries
            # once used via ROOT
            aux_dict_targets.append(env.Copy(os.path.join(env['LIBDIR'], rootmap_file.name), rootmap_file))

            # collect files for class version checks
            check_files.append((os.path.join(env['BUILDDIR'], str(linkdef_file).replace(os.sep, '_') + '.check'), linkdef_file))

        # build a shared library with all source and dictionary files
        if len(env['SRC_FILES']) > 0 or len(dict_files) > 0:

            # determine path of library and adjust path and name for modules
            lib_dir_name = env['LIBDIR']
            if is_module_dir:
                lib_dir_name = env['MODDIR']
                if os.path.basename(dir_name) != 'modules' and dir_name != '.':
                    lib_name = os.path.basename(dir_name)

            # update list of dataobject libraries
            if is_dataobjects_dir:
                parent_env['DATAOBJECT_LIB'] = lib_name

            # create library and map for modules
            lib = env.SharedLibrary(os.path.join(lib_dir_name, lib_name),
                                    [env['SRC_FILES'], dict_files])
            debug = env.StripDebug(lib)

            # make sure pcm and rootmap files are installed before the library
            env.Depends(lib, aux_dict_targets)

            lib_files = [lib, debug] + aux_dict_targets
            if is_module_dir:
                map_file = os.path.join(lib_dir_name, env.subst('$SHLIBPREFIX') + lib_name + '.b2modmap')
                # Adding lib_files is important to ensure we load local module
                # libraries if they are newer than those in central directory
                map_sources = env['SRC_FILES'] + lib_files

                reg_map = env.RegMap(map_file, map_sources)
                lib_files.append(reg_map)

                if env['MODULE_IO'] and env.get('HAS_DOT', False):
                    module_io = env.ModuleIo(reg_map)
                    env.Depends(module_io, [lib, reg_map])
                    env.Requires(module_io, [os.path.join('$BINDIR', 'basf2')])
                    env.Alias('module-io', module_io)

            # check class versions
            for check_filename, linkdef_file in check_files:
                env.ClassVersionCheck(check_filename, [linkdef_file, lib, debug] + env['REQUIRED_TOOLS'])

            # define build target aliases
            env.Alias(lib_name, lib_files)
            if is_module_dir:
                define_aliases(env, lib_files, dir_name, 'modules')
            else:
                define_aliases(env, lib_files, dir_name, 'lib')

            # install python module libraries with a file name that is recognized by python
            if is_python_module_dir:
                pymod = env.InstallAs(os.path.join(env['LIBDIR'], os.path.basename(dir_name) + env.subst('$SHLIBSUFFIX')), lib)
                define_aliases(env, pymod, dir_name, 'lib')
    else:

        # add linkdef and source files to parent environment if we are in a normal sub-directory
        parent_env['LINKDEF_FILES'] += env['LINKDEF_FILES']
        parent_env['SRC_FILES'] += env['SRC_FILES']

    # add dataobject libs to parent environment
    if 'DATAOBJECT_LIB' in env.Dictionary():
        parent_env.Append(DATAOBJECT_LIBS=env['DATAOBJECT_LIB'])
    if 'DATAOBJECT_LIBS' in env.Dictionary():
        parent_env.AppendUnique(DATAOBJECT_LIBS=env['DATAOBJECT_LIBS'])

    # process modules directory last so that it is known whether the main library exists
    if os.path.isdir(os.path.join(dir_name, 'modules')):
        process_dir(env, os.path.join(dir_name, 'modules'), True)

    # setup environment for building executables, include SConscript if it exists
    save_env = env.Clone()
    env['TOOLS_FILES'] = get_files(os.path.join(dir_name, 'tools', '*.cc'))
    sconscript_name = os.path.join(dir_name, 'tools', 'SConscript')
    if os.path.isfile(sconscript_name):
        result = SConscript(sconscript_name, exports='env')
        if isinstance(result, Environment):
            env = result

    # build a binary for each source file in the tools directory
    for bin_file in env['TOOLS_FILES']:
        bin_filename = os.path.splitext(os.path.basename(str(bin_file)))[0]
        bin_env = env.Clone()
        bin_env['LIBS'] = []
        if bin_filename in bin_env['TOOLS_LIBS']:
            bin_env['LIBS'] = Flatten([env.subst(str(lib)).split() for lib in
                                       Flatten(bin_env['TOOLS_LIBS'][bin_filename])])
        if bin_filename in bin_env['TOOLS_LIBPATH']:
            bin_env['LIBPATH'] = bin_env['TOOLS_LIBPATH'][bin_filename]
        tool = bin_env.Program(os.path.join(bin_env['BINDIR'], bin_filename),
                               os.path.join(bin_env['BUILDDIR'], str(bin_file)))
        debug = bin_env.StripDebug(tool)
        env.Alias(os.path.join(dir_name, 'tools', bin_filename), [tool, debug])
        env.Alias(os.path.join(dir_name, 'tools'), [tool, debug])
        env.Alias(os.path.join(dir_name, bin_filename), [tool, debug])
        define_aliases(env, [tool, debug], dir_name, 'bin')

    # restore original environment
    env = save_env

    # build shared objects from the tests/*.cc files in this directory
    if len(local_test_files) > 0:
        local_test_env = env.Clone()
        sconscript_name = os.path.join(dir_name, 'tests', 'SConscript')
        if os.path.isfile(sconscript_name):
            result = SConscript(sconscript_name, exports='env')
            if isinstance(result, Environment):
                local_test_env = result
        local_test_env.PrependUnique(LIBS=['test_main'])
        local_test_env.AppendUnique(LIBS=['mdst_dbobjects', 'framework', '$ROOT_LIBS', 'gtest', 'pthread'])
        env['TEST_FILES'] = [test_file for test_file in env['TEST_FILES']
                             if test_file not in local_test_files]
        env.Prepend(TEST_FILES=local_test_env.SharedObject(local_test_files))
        env.AppendUnique(TEST_LIBS=local_test_env['LIBS'])

    # combine all tests from subdirectories to a new test executable
    if len(env['TEST_FILES']) > 0:
        test_filename = lib_name + '-unittests'
        test_env = env.Clone()
        test_env['LIBS'] = env['TEST_LIBS']
        test = test_env.Program(os.path.join(test_env['BINDIR'], test_filename),
                                env['TEST_FILES'])
        env.StripDebug(test)
        env.Alias(os.path.join(dir_name, 'tests', test_filename), test)
        env.Alias(os.path.join(dir_name, 'tests'), test)
        env.Alias(os.path.join(dir_name, test_filename), test)
        define_aliases(env, test, dir_name, 'tests')

    # add test files and libs to parent environment
    parent_env.AppendUnique(TEST_LIBS=env['TEST_LIBS'])
    parent_env.Append(TEST_FILES=env['TEST_FILES'])


def generate(env):
    """SCons tool entry point: register process_dir as the ProcessDirectory
    method of the construction environment."""
    env.AddMethod(process_dir, 'ProcessDirectory')


def exists(env):
    """SCons tool entry point: the tool is always available."""
    return True
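
A minimal usage sketch, assuming process_dir.py is available on the SCons tool path (e.g. under site_scons/site_tools), that the construction variables it reads (such as PACKAGE, BUILDDIR, INCDIR, LIBDIR, BINDIR, DATADIR and MODDIR) are defined elsewhere in the basf2 build setup, and that the custom builders it calls (RootDict, StripDebug, RegMap, ...) are provided by other basf2 site tools. A top-level SConstruct could then process a package directory like this:

    env = Environment(tools=['default', 'process_dir'])
    env['PACKAGE'] = 'analysis'              # hypothetical package name
    env.ProcessDirectory('analysis', False)  # method registered by generate() via AddMethod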