Belle II Software light-2406-ragdoll
process_dir.py
#!/usr/bin/env python

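"""SCons tool for processing basf2 package directories.

It recursively collects the headers, sources, scripts, tools, and data files
of a directory, builds the corresponding libraries, ROOT dictionaries, module
libraries, executables, and unit tests, and defines build target aliases.
"""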
import os
import stat
from SCons.Script import Environment, File, Flatten, Glob, SConscript


def define_aliases(
    env,
    target,
    dir_name,
    extension=None,
):
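    """Register build aliases for the given target.

    The target is aliased to dir_name and to each of its parent directories;
    if an extension is given, it is also aliased to '<dir>.<extension>' and
    to the extension itself.
    """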

    parent_dir = dir_name
    while len(parent_dir) > 0:
        env.Alias(parent_dir, target)
        if extension:
            env.Alias(parent_dir + '.' + extension, target)

        next_parent_dir = os.path.split(parent_dir)[0]
        # stop if splitting off the last component did not change the path
        if next_parent_dir == parent_dir:
            break
        parent_dir = next_parent_dir

    if extension:
        env.Alias(extension, target)


def get_files(path_name):
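    """Return the file nodes matching the glob pattern path_name, excluding directories."""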
    result = Glob(path_name)
    files = [f for f in result if not os.path.isdir(str(f))]
    return files


def get_python_files_recursive(topdir_path):
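    """Return file nodes for all Python (.py) files found recursively below topdir_path."""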
    python_file_nodes = []

    for (dir_path, dir_names, file_names) in os.walk(topdir_path):
        for file_name in file_names:
            if file_name.endswith('.py'):
                file_path = os.path.join(dir_path, file_name)
                file_node = File(file_path)
                python_file_nodes.append(file_node)

    return python_file_nodes


def process_dir(
    parent_env,
    dir_name,
    is_module_dir
):
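    """Define the build targets for one directory of a package.

    Header, source, script, tool, and data files are collected, a local
    SConscript file is included if present, shared libraries, ROOT
    dictionaries, module libraries, tools, and unit tests are built, and the
    function recurses into subdirectories. If is_module_dir is true, the
    resulting library is treated as a basf2 module library.
    """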

    # remove leading ./
    if dir_name.startswith('./'):
        dir_name = dir_name[2:]

    # determine library name
    if dir_name == '.':
        lib_name = parent_env['PACKAGE']
    else:
        lib_name = dir_name.replace(os.sep, '_')

    # get list of header and linkdef files
    header_files = get_files(os.path.join(dir_name, '*.h'))
    if dir_name != '.':
        header_files += get_files(os.path.join(dir_name, 'include', '*.h'))
    linkdef_files = []
    # iterate over a copy because linkdef headers are removed from header_files
    for header_file in list(header_files):
        if str(header_file).lower().endswith('linkdef.h'):
            linkdef_files.append(header_file)
            header_files.remove(header_file)

    # get list of source files
    c_sources = get_files(os.path.join(dir_name, '*.cc')) \
        + get_files(os.path.join(dir_name, 'src', '*.cc')) \
        + get_files(os.path.join(dir_name, '*.c')) \
        + get_files(os.path.join(dir_name, 'src', '*.c'))
    fortran_sources = get_files(os.path.join(dir_name, '*.f')) \
        + get_files(os.path.join(dir_name, 'src', '*.f')) \
        + get_files(os.path.join(dir_name, '*.F')) \
        + get_files(os.path.join(dir_name, 'src', '*.F')) \
        + get_files(os.path.join(dir_name, '*.f90')) \
        + get_files(os.path.join(dir_name, 'src', '*.f90')) \
        + get_files(os.path.join(dir_name, '*.F90')) \
        + get_files(os.path.join(dir_name, 'src', '*.F90'))
    src_nodes = c_sources + fortran_sources
    src_files = [os.path.join(parent_env['BUILDDIR'], str(node))
                 for node in src_nodes]

    # get list of test files
    test_files = [os.path.join(parent_env['BUILDDIR'], str(node))
                  for node in get_files(os.path.join(dir_name, 'tests', '*.cc'))]

    # get list of script files
    script_files = get_python_files_recursive(os.path.join(dir_name, 'scripts'))

    # get list of executable script files
    executable_files = []
    executable_mode = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    for tools_file in get_files(os.path.join(dir_name, 'tools', '*')):
        executable_file = str(tools_file)
        # keep only files with the execute bit set for user, group, and others
        if os.stat(executable_file).st_mode & executable_mode == executable_mode:
            executable_files.append(tools_file)

    # get list of data files
    data_files = get_files(os.path.join(dir_name, 'data', '*'))

    # create environment for directory
    env = parent_env.Clone()
    env['HEADER_FILES'] = header_files
    env['LINKDEF_FILES'] = linkdef_files
    env['SRC_FILES'] = src_files
    env['TEST_FILES'] = test_files
    env['TEST_LIBS'] = []
    env['SCRIPT_FILES'] = script_files
    env['EXECUTABLE_FILES'] = executable_files
    env['DATA_FILES'] = data_files
    env['DATAOBJECT_LIB'] = []
    env['DATAOBJECT_LIBS'] = []

    # clean up some environment variables that should not be inherited from the parent environment
    if 'SUBLIB' in env.Dictionary():
        del env.Dictionary()['SUBLIB']
    if 'PYTHON_MODULE' in env.Dictionary():
        del env.Dictionary()['PYTHON_MODULE']
    if 'LIBS' in env.Dictionary():
        del env.Dictionary()['LIBS']

    # suppress all compiler warnings for directories listed in DISABLE_COMPILER_WARNINGS
    if dir_name in env.get('DISABLE_COMPILER_WARNINGS', []):
        env.AppendUnique(CXXFLAGS=['-w'], CCFLAGS=['-w'], FORTRANFLAGS=['-w'], LINKFLAGS=['-w'])

    # link dataobjects to analysis modules
    if dir_name == '.':
        env.Append(LIBS=['dataobjects'])

    # include SConscript file if it exists
    sconscript_name = os.path.join(dir_name, 'SConscript')
    if os.path.isfile(sconscript_name):
        result = SConscript(sconscript_name, exports='env')

        # use the new environment if it was updated by the SConscript file
        if isinstance(result, Environment):
            env = result

        # don't continue with the default build process if the SConscript file requests this
        if not env.Dictionary().get('CONTINUE', True):
            return

    # add additional sources
    if 'ADDITIONAL_SOURCES' in env.Dictionary():
        additional_src_nodes = []
        for source in env.Dictionary()['ADDITIONAL_SOURCES']:
            additional_src_nodes += get_files(os.path.join(dir_name, source))
        additional_src_files = [os.path.join(parent_env['BUILDDIR'], str(node))
                                for node in additional_src_nodes]
        if len(additional_src_files) > 0:
            src_files += additional_src_files
            env['SRC_FILES'] = src_files

    # create Fortran module directory
    if len(fortran_sources) > 0:
        fortran_module_dir = env.Dir('$FORTRANMODDIR').get_abspath()
        if not os.path.isdir(fortran_module_dir):
            os.makedirs(fortran_module_dir)

    # install header files in the include directory
    includes = env.Install(os.path.join(env['INCDIR'], dir_name),
                           env['HEADER_FILES'])
    define_aliases(env, includes, dir_name, 'include')

    # install script files in the library directory
    script_targets = []
    for script_file_node in env['SCRIPT_FILES']:
        script_file_path = str(script_file_node)
        script_dir = os.path.dirname(script_file_path)

        destination_reldir = os.path.relpath(script_dir,
                                             os.path.join(dir_name, 'scripts'))

        if not destination_reldir:
            continue
        destination_dir = os.path.join(env['LIBDIR'], destination_reldir)
        script_target = env.Install(destination_dir, script_file_node)
        script_targets.append(script_target)

    define_aliases(env, script_targets, dir_name, 'scripts')

    # install executable script files in the bin directory
    executables = env.Install(env['BINDIR'], env['EXECUTABLE_FILES'])
    define_aliases(env, executables, dir_name, 'tools')

    # install data files in the data directory
    data = env.Install(os.path.join(env['DATADIR'], dir_name), env['DATA_FILES'])
    define_aliases(env, data, dir_name, 'data')

    # remember tests defined in this directory
    local_test_files = env['TEST_FILES']

    # loop over subdirs
    entries = os.listdir(dir_name)
    exclude_dirs = set()
    if os.path.exists(os.path.join(dir_name, '.excluded_directories')):
        # read the list of subdirectories excluded from the build
        with open(os.path.join(dir_name, '.excluded_directories')) as excluded_file:
            for line in excluded_file:
                exclude_dirs.add(line.rstrip('\n'))
        print(f'Excluded directories: {exclude_dirs}')
    for entry in entries:
        if entry in exclude_dirs:
            continue
        if entry.find('.') == -1 \
                and not os.path.isfile(os.path.join(dir_name, entry)) \
                and entry not in [
                    'include',
                    'src',
                    'tools',
                    'tests',
                    'scripts',
                    'data',
                    'doc',
                    'examples',
                    'modules',
                ]:
            if dir_name == '.' and entry in [
                'build',
                'include',
                'lib',
                'bin',
                'modules',
                'data',
                'site_scons',
            ]:
                continue
            process_dir(env, os.path.join(dir_name, entry), is_module_dir and dir_name != '.')

    # determine whether we are in a special directory
    is_package_dir = dir_name == env['PACKAGE']
    is_sublib_dir = env.Dictionary().get('SUBLIB', False)
    is_python_module_dir = env.Dictionary().get('PYTHON_MODULE', False)
    is_dataobjects_dir = os.path.basename(dir_name) == 'dataobjects' \
        and env['PACKAGE'] != 'framework'
    if dir_name == 'dataobjects':
        is_dataobjects_dir = True
        is_module_dir = False
        lib_name = parent_env['PACKAGE']

    # check whether we have to create a new library
    if is_package_dir or is_sublib_dir or is_python_module_dir \
            or is_module_dir or is_dataobjects_dir:

        # generate dictionaries
        dict_files = []
        aux_dict_targets = []
        check_files = []
        for linkdef_file in env['LINKDEF_FILES']:
            # set the name of the library generated at this stage;
            # it will be read by the RootDict builder
            dict_filename = str(linkdef_file).replace(os.sep, '_')[:-9] + 'Dict.cc'
            dict_file, rootmap_file, rootpcm_file = env.RootDict(
                os.path.join(env['BUILDDIR'], dict_filename), linkdef_file,
                ROOTCLING_ROOTMAP_LIB=lib_name)
            # add the extra cxxflags
            dict_ccflags = env['CCFLAGS'] + env['ROOTCLING_EXTRA_CCFLAGS']
            # add the current directory to the include path for dictionary compilation
            dict_files.append(env.SharedObject(dict_file, CPPPATH=['.'] + env['CPPPATH'], CCFLAGS=dict_ccflags))

            # install the corresponding pcm file in lib (for cling)
            aux_dict_targets.append(env.Copy(os.path.join(env['LIBDIR'], rootpcm_file.name), rootpcm_file))
            # install the corresponding rootmap file to support auto-loading of the library
            # once it is used via ROOT
            aux_dict_targets.append(env.Copy(os.path.join(env['LIBDIR'], rootmap_file.name), rootmap_file))

            # collect files for class version checks
            check_files.append((os.path.join(env['BUILDDIR'], str(linkdef_file).replace(os.sep, '_') + '.check'), linkdef_file))

        # build a shared library with all source and dictionary files
        if len(env['SRC_FILES']) > 0 or len(dict_files) > 0:

            # determine path of library and adjust path and name for modules
            lib_dir_name = env['LIBDIR']
            if is_module_dir:
                lib_dir_name = env['MODDIR']
                if os.path.basename(dir_name) != 'modules' and dir_name != '.':
                    lib_name = os.path.basename(dir_name)

            # update list of dataobject libraries
            if is_dataobjects_dir:
                parent_env['DATAOBJECT_LIB'] = lib_name

            # create library and map for modules
            lib = env.SharedLibrary(os.path.join(lib_dir_name, lib_name),
                                    [env['SRC_FILES'], dict_files])
            debug = env.StripDebug(lib)

            # make sure pcm and rootmap files are installed before the library
            env.Depends(lib, aux_dict_targets)

            lib_files = [lib, debug] + aux_dict_targets
            if is_module_dir:
                map_file = os.path.join(lib_dir_name, env.subst('$SHLIBPREFIX') + lib_name + '.b2modmap')
                # adding lib_files is important to ensure we load local module
                # libraries if they are newer than those in the central directory
                map_sources = env['SRC_FILES'] + lib_files

                reg_map = env.RegMap(map_file, map_sources)
                lib_files.append(reg_map)

                if env['MODULE_IO'] and env.get('HAS_DOT', False):
                    module_io = env.ModuleIo(reg_map)
                    env.Depends(module_io, [lib, reg_map])
                    env.Requires(module_io, [os.path.join('$BINDIR', 'basf2')])
                    env.Alias('module-io', module_io)

            # check class versions
            for check_filename, linkdef_file in check_files:
                env.ClassVersionCheck(check_filename, [linkdef_file, lib, debug] + env['REQUIRED_TOOLS'])

            # define build target aliases
            env.Alias(lib_name, lib_files)
            if is_module_dir:
                define_aliases(env, lib_files, dir_name, 'modules')
            else:
                define_aliases(env, lib_files, dir_name, 'lib')

            # install python module libraries with a file name that is recognized by python
            if is_python_module_dir:
                pymod = env.InstallAs(os.path.join(env['LIBDIR'], os.path.basename(dir_name) + env.subst('$SHLIBSUFFIX')), lib)
                define_aliases(env, pymod, dir_name, 'lib')
    else:

        # add linkdef and source files to the parent environment if we are in a normal sub-directory
        parent_env['LINKDEF_FILES'] += env['LINKDEF_FILES']
        parent_env['SRC_FILES'] += env['SRC_FILES']

    # add dataobject libs to parent environment
    if 'DATAOBJECT_LIB' in env.Dictionary():
        parent_env.Append(DATAOBJECT_LIBS=env['DATAOBJECT_LIB'])
    if 'DATAOBJECT_LIBS' in env.Dictionary():
        parent_env.AppendUnique(DATAOBJECT_LIBS=env['DATAOBJECT_LIBS'])

    # process the modules directory last so that it is known whether the main library exists
    if os.path.isdir(os.path.join(dir_name, 'modules')):
        process_dir(env, os.path.join(dir_name, 'modules'), True)

    # set up the environment for building executables and include the tools SConscript if it exists
    save_env = env.Clone()
    env['TOOLS_FILES'] = get_files(os.path.join(dir_name, 'tools', '*.cc'))
    sconscript_name = os.path.join(dir_name, 'tools', 'SConscript')
    if os.path.isfile(sconscript_name):
        result = SConscript(sconscript_name, exports='env')
        if isinstance(result, Environment):
            env = result

    # build a binary for each source file in the tools directory
    for bin_file in env['TOOLS_FILES']:
        bin_filename = os.path.splitext(os.path.basename(str(bin_file)))[0]
        bin_env = env.Clone()
        bin_env['LIBS'] = []
        if bin_filename in bin_env['TOOLS_LIBS']:
            bin_env['LIBS'] = Flatten([env.subst(str(lib)).split()
                                       for lib in Flatten(bin_env['TOOLS_LIBS'][bin_filename])])
        if bin_filename in bin_env['TOOLS_LIBPATH']:
            bin_env['LIBPATH'] = bin_env['TOOLS_LIBPATH'][bin_filename]
        tool = bin_env.Program(os.path.join(bin_env['BINDIR'], bin_filename),
                               os.path.join(bin_env['BUILDDIR'], str(bin_file)))
        debug = bin_env.StripDebug(tool)
        env.Alias(os.path.join(dir_name, 'tools', bin_filename), [tool, debug])
        env.Alias(os.path.join(dir_name, 'tools'), [tool, debug])
        env.Alias(os.path.join(dir_name, bin_filename), [tool, debug])
        define_aliases(env, [tool, debug], dir_name, 'bin')

    # restore original environment
    env = save_env

    # build shared objects from the tests/*.cc files in this directory
    if len(local_test_files) > 0:
        local_test_env = env.Clone()
        sconscript_name = os.path.join(dir_name, 'tests', 'SConscript')
        if os.path.isfile(sconscript_name):
            result = SConscript(sconscript_name, exports='env')
            if isinstance(result, Environment):
                local_test_env = result
        local_test_env.PrependUnique(LIBS=['test_main'])
        local_test_env.AppendUnique(LIBS=['mdst_dbobjects', 'framework', '$ROOT_LIBS', 'gtest', 'pthread'])
        env['TEST_FILES'] = [test_file for test_file in env['TEST_FILES']
                             if test_file not in local_test_files]
        env.Prepend(TEST_FILES=local_test_env.SharedObject(local_test_files))
        env.AppendUnique(TEST_LIBS=local_test_env['LIBS'])

    # combine all tests from subdirectories to a new test executable
    if len(env['TEST_FILES']) > 0:
        test_filename = lib_name + '-unittests'
        test_env = env.Clone()
        test_env['LIBS'] = env['TEST_LIBS']
        test = test_env.Program(os.path.join(test_env['BINDIR'], test_filename),
                                env['TEST_FILES'])
        env.StripDebug(test)
        env.Alias(os.path.join(dir_name, 'tests', test_filename), test)
        env.Alias(os.path.join(dir_name, 'tests'), test)
        env.Alias(os.path.join(dir_name, test_filename), test)
        define_aliases(env, test, dir_name, 'tests')

    # add test files and libs to parent environment
    parent_env.AppendUnique(TEST_LIBS=env['TEST_LIBS'])
    parent_env.Append(TEST_FILES=env['TEST_FILES'])


def generate(env):
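    """Add process_dir to the construction environment as the ProcessDirectory method."""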
    env.AddMethod(process_dir, 'ProcessDirectory')


def exists(env):
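    """Report that this tool is always available."""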
    return True