# Belle II Software  release-08-01-10
# submit_collector.py
1 #!/usr/bin/env python3
2 # -*- coding: utf-8 -*-
3 
4 
11 
12 import sys
13 from pathlib import Path
14 import time
15 from caf import backends
16 
# Stamp every INFO-level message with the time it was emitted, so the
# polling output below shows when each check happened.
from basf2 import B2INFO, logging, LogLevel, LogInfo
logging.set_info(LogLevel.INFO, logging.get_info(LogLevel.INFO) | LogInfo.TIMESTAMP)
21 
22 
# Probability cut for the collector; overridable as the single CLI argument.
probcut = float(sys.argv[1]) if len(sys.argv) == 2 else 0.001

# Directory used both as the batch jobs' working dir and their output dir.
root_dir = 'rootfile'

# Read the input file list, one path per line.  Blank lines (e.g. a
# trailing newline at the end of 'runlist') are skipped so that empty
# strings are never handed to the backend as input files.
with open('runlist') as runlist:
    input_files = [line.strip() for line in runlist if line.strip()]
31 
32 # data_dir = '/hsm/belle2/bdata/Data/Raw/e0002/'
33 
34 # input_files = glob.glob(data_dir + "*/*/cosmic.*.root")
35 # print(input_files)
36 # exit(1)
37 
38 
# Define the collector batch job.
job1 = backends.Job("CDC_Colllector")

# Output and working directory point at the same absolute location.
target_dir = str(Path(root_dir).absolute())
job1.output_dir = target_dir
job1.working_dir = target_dir

# Command executed inside each batch (sub)job.
job1.cmd = ['basf2', 'run_collector.py', str(probcut)]
# Ship the steering file into each subjob's working directory so the
# command above can find it.
job1.input_sandbox_files.append("run_collector.py")
# If run_collector.py needs a config file, sandbox it the same way:
# job1.input_sandbox_files.append("location")

# Input data files gathered from the run list above (a glob pattern such
# as data_dir + "/cr*.root" would also be accepted here).
job1.input_files = input_files

# One input file per subjob; -1 would send everything to a single job.
job1.max_files_per_subjob = 1
# Batch queue to submit to.
job1.backend_args['queue'] = 's'
67 
68 
69 # Here we submit the job to the batch system. Subjobs for the input data files are automatically created
# Submit to LSF; one subjob per input file is created automatically.
lsf = backends.LSF()
lsf.submit(job1)

# Poll until the job (including all subjobs) is finished.  Using
# job1.ready() directly as the loop condition fixes the original loop,
# which logged "Not done yet, will sleep" and slept a final 60 seconds
# even after the job had already reported ready.
while not job1.ready():
    B2INFO("Not done yet, will sleep")
    time.sleep(60)

B2INFO("Finished")