# Belle II Software release-05-01-25
# submit_collector.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Submit CDC collector jobs to the LSF batch system via the CAF backends.

import sys
from pathlib import Path
import time
import glob

# Add a timestamp to all INFO messages so batch-job progress can be
# correlated with wall-clock time in the log.
from basf2 import B2INFO, logging, LogLevel, LogInfo
currentInfo = logging.get_info(LogLevel.INFO)
logging.set_info(LogLevel.INFO, currentInfo | LogInfo.TIMESTAMP)

from caf import backends
import ROOT
17 
# Track-fit probability cut forwarded to run_collector.py; optional first
# CLI argument, defaults to 0.001 when not given.
probcut = float(sys.argv[1]) if len(sys.argv) == 2 else 0.001

# Directory that receives the collector output ROOT files.
root_dir = 'rootfile'

# The file 'runlist' holds one input file path per line.
# (Alternatively the list could be built with glob over the raw-data
# directory, e.g. glob.glob('/hsm/belle2/bdata/Data/Raw/e0002/*/*/cosmic.*.root').)
input_files = []
with open('runlist') as runlist:
    for line in runlist:
        input_files.append(line.rstrip('\n'))
32 
33 
# Configure the batch job that runs the collector over the input data.
# NOTE(review): "Colllector" triple-l typo kept on purpose — it is the
# runtime job name and may be referenced elsewhere; confirm before renaming.
job1 = backends.Job("CDC_Colllector")
job1.output_dir = str(Path(root_dir).absolute())
job1.working_dir = str(Path(root_dir).absolute())

# Command executed inside each batch (sub)job.
job1.cmd = ['basf2', 'run_collector.py', str(probcut)]
# Copy the steering file into each subjob's working directory so the
# command above can find it.
job1.input_sandbox_files.append("run_collector.py")
# If run_collector.py needs a config file, sandbox it the same way, e.g.:
# job1.input_sandbox_files.append("location")

# Full list of input data files; subjobs are created from this list.
# (For testing, a slice such as input_files[:5] reduces the job count.)
job1.input_files = input_files

# One input file per subjob (-1 would send all files to a single job).
job1.max_files_per_subjob = 1
# LSF queue to submit to.
job1.backend_args['queue'] = 's'
62 
63 
64 # Here we submit the job to the batch system. Subjobs for the input data files are automatically created
65 lsf = backends.LSF()
66 lsf.submit(job1)
67 # Here we start waiting for the overall job to complete
68 ready = False
69 while not ready:
70  B2INFO("Are we done?")
71  ready = job1.ready()
72  B2INFO("Not done yet, will sleep")
73  time.sleep(60)
74 
75 B2INFO("Finished")
# Cross-references (from the doxygen listing this file was extracted from):
# backends.LSF — definition: backends.py:1567
# backends.Job — definition: backends.py:310