Skip to content

Commit

Permalink
Add files via upload
Browse files Browse the repository at this point in the history
  • Loading branch information
Elisa-Visentin authored Aug 9, 2023
1 parent 4659ba8 commit 2339d42
Show file tree
Hide file tree
Showing 10 changed files with 1,215 additions and 0 deletions.
59 changes: 59 additions & 0 deletions magicctapipe/scripts/lst1_magic/nsb_scripts/LSTnsb.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
from lstchain.image.modifier import calculate_noise_parameters
import numpy as np
import yaml
import glob
def main():
    """
    Create lists of LST runs grouped by NSB (night-sky-background) level.

    Reads ``config_general.yaml`` for the LST run list and the NSB bin
    centres, estimates the NSB noise of a sample of subruns for every LST
    run with ``lstchain.image.modifier.calculate_noise_parameters`` and
    appends each run to the ``LST_{nsb}_.txt`` file of its NSB bin.
    """
    with open("config_general.yaml", "rb") as f:  # "rb": binary mode for reading
        config = yaml.safe_load(f)

    # Reference MC simtel file required by calculate_noise_parameters.
    # NOTE(review): hard-coded cluster path — consider moving it to the config.
    simtel = '/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray/node_theta_14.984_az_355.158_/output_v1.4/simtel_corsika_theta_14.984_az_355.158_run10.simtel.gz'
    runs = config["general"]["LST_runs"]
    nsb = config["general"]["nsb"]

    # Bin edges halfway between consecutive NSB centres; the last bin gets a
    # fixed 0.25 upper margin and the first edge is 0.
    width = [a / 2 - b / 2 for a, b in zip(nsb[1:], nsb[:-1])]
    width.append(0.25)
    nsb_limit = [a + b for a, b in zip(nsb, width)]
    nsb_limit.insert(0, 0)
    print(nsb_limit)

    lst_config = 'lstchain_standard_config.json'
    with open(runs) as LSTfile:
        LST_runs = np.genfromtxt(LSTfile, dtype=str, delimiter=',')

    for i in LST_runs:
        # i[0] is a 'YYYY_MM_DD' date string, i[1] the run number.
        lstObsDir = i[0].split("_")[0] + i[0].split("_")[1] + i[0].split("_")[2]
        inputdir = f'/fefs/aswg/data/real/DL1/{lstObsDir}/v0.9/tailcut84'
        run = np.sort(glob.glob(f"{inputdir}/dl1*Run*{i[1]}.*.h5"))
        if len(run) == 0:
            continue

        # Sample roughly 10 subruns per run to keep the evaluation cheap.
        mod = 1 if len(run) < 10 else int(len(run) / 10)
        noise = []
        for ii in range(0, len(run)):
            if ii % mod == 0:
                a, b, c = calculate_noise_parameters(simtel, run[ii], lst_config)
                noise.append(a)

        a = sum(noise) / len(noise)
        std = np.std(noise)
        print('nsb average (all)', a, 'std', std)

        # Reject subruns deviating more than 3 sigma from the mean.
        subrun_ok = [value for value in noise if np.abs(value - a) < 3 * std]
        # Guard: if all sampled subruns are identical, std == 0 and subrun_ok
        # is empty — keep the plain average instead of dividing by zero.
        if subrun_ok:
            a = sum(subrun_ok) / len(subrun_ok)
        print('nsb average', a)

        # Append the run to the list file of the NSB bin it falls into.
        for j in range(0, len(nsb)):
            if nsb_limit[j] < a < nsb_limit[j + 1]:
                with open(f"LST_{nsb[j]}_.txt", "a+") as f:
                    f.write(str(i[0]) + "," + str(i[1]) + "\n")


if __name__ == "__main__":
    main()
23 changes: 23 additions & 0 deletions magicctapipe/scripts/lst1_magic/nsb_scripts/config_general.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Telescope IDs for the MAGIC+LST analysis (0 = telescope not used).
mc_tel_ids:
    LST-1: 1
    LST-2: 0
    LST-3: 0
    LST-4: 0
    MAGIC-I: 2
    MAGIC-II: 3

# Paths of the analysis working area.
directories:
    workspace_dir : "/fefs/aswg/workspace/elisa.visentin/MAGIC_LST_analysis/PG1553_nsb/" #Pay attention to put the last "/"!!!
    target_name : "PG1553"

# General analysis settings read by the nsb_* scripts.
general:
    target_RA_deg : 238.929 #RA in degrees
    target_Dec_deg: 11.1900 #Dec in degrees RA: 238.929. Dec: 11.1900
    focal_length : "effective"
    MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0"
    LST_runs : "LST_runs.txt"
    # NSB bin centres used by LSTnsb.py to classify the runs.
    nsb : [0.5, 1.0, 1.5, 2.0, 2.5, 3.0]
14 changes: 14 additions & 0 deletions magicctapipe/scripts/lst1_magic/nsb_scripts/nsb.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#!/bin/sh

# SLURM batch script: classify the LST runs by NSB level by running
# LSTnsb.py (reads config_general.yaml, writes LST_{nsb}_.txt lists).

#SBATCH -p long

#SBATCH -J LSTnsb

#SBATCH -N 1

# Lift memory-lock and stack limits for the job, then print all limits.
ulimit -l unlimited
ulimit -s unlimited
ulimit -a

# Run inside the magic-lst conda env; all output goes to nsblog.log.
conda run -n magic-lst python LSTnsb.py > nsblog.log 2>&1
129 changes: 129 additions & 0 deletions magicctapipe/scripts/lst1_magic/nsb_scripts/nsb_DL1_to_DL2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
"""
This script creates the bashscripts necessary to apply "lst1_magic_dl1_stereo_to_dl2.py"
to the DL1 stereo data (real and MC). It also creates new subdirectories associated with
the data level 2. The DL2 files are saved at:
WorkingDirectory/DL2/
and in the subdirectories therein.
Usage:
$ python DL1_to_DL2.py
"""

import os
import numpy as np
import glob
import yaml
import logging

logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)

def DL1_to_2(target_dir, nsb):
    """
    Create the bash scripts that run lst1_magic_dl1_stereo_to_dl2.py on the
    coincident-stereo DL1 data of one NSB level, one SLURM job array per night.

    Parameters
    ----------
    target_dir: str
        Path to the working directory
    nsb: str or float
        NSB level; selects the DL1 input subdirectory, the random-forest
        directory and the DL2 output subdirectory
    """
    # Output tree: <target_dir>/DL2/Observations/<nsb>/<night>
    # makedirs(exist_ok=True) also creates missing parents, unlike os.mkdir.
    os.makedirs(target_dir + "/DL2/Observations/" + str(nsb), exist_ok=True)

    process_name = "DL2_" + target_dir.split("/")[-2:][1] + str(nsb)
    data_files_dir = target_dir + "/DL1/Observations/Coincident_stereo/" + str(nsb)
    # NOTE(review): hard-coded personal RF path — should come from the config.
    RFs_dir = "/fefs/aswg/workspace/elisa.visentin/MAGIC_LST_analysis/PG1553_nsb/RF/" + str(nsb)  # then, RFs saved somewhere (as Julian's ones)
    listOfDL1nights = np.sort(glob.glob(data_files_dir + "/*"))
    print(data_files_dir)

    for night in listOfDL1nights:
        output = target_dir + f'/DL2/Observations/{nsb}/{night.split("/")[-1]}'
        os.makedirs(output, exist_ok=True)

        listOfDL1Files = np.sort(glob.glob(night + "/*.h5"))
        np.savetxt(night + "/list_of_DL1_stereo_files.txt", listOfDL1Files, fmt='%s')
        process_size = len(listOfDL1Files) - 1

        # One SLURM array task per DL1 file of the night.
        with open(f'DL1_to_DL2_{nsb}_{night.split("/")[-1]}.sh', 'w') as f:
            f.write('#!/bin/sh\n\n')
            f.write('#SBATCH -p long\n')
            f.write('#SBATCH -J ' + process_name + '\n')
            f.write(f"#SBATCH --array=0-{process_size}%100\n")
            f.write('#SBATCH --mem=30g\n')
            f.write('#SBATCH -N 1\n\n')
            f.write('ulimit -l unlimited\n')
            f.write('ulimit -s unlimited\n')
            f.write('ulimit -a\n\n')
            f.write(f"SAMPLE_LIST=($(<{night}/list_of_DL1_stereo_files.txt))\n")
            f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n")
            f.write(f'export LOG={output}' + '/DL1_to_DL2_${SLURM_ARRAY_TASK_ID}.log\n')
            f.write(f'conda run -n magic-lst python lst1_magic_dl1_stereo_to_dl2.py --input-file-dl1 $SAMPLE --input-dir-rfs {RFs_dir} --output-dir {output} --config-file {target_dir}/../config_general.yaml >$LOG 2>&1\n\n')



def main():
    """
    Read config_general.yaml, build the DL1->DL2 bash scripts for every NSB
    level found in the LST_*_.txt run lists, and submit them as a chain of
    SLURM jobs (each job depends on the previous one).
    """
    with open("config_general.yaml", "rb") as f:  # "rb": binary mode for reading
        config = yaml.safe_load(f)

    target_dir = config["directories"]["workspace_dir"] + config["directories"]["target_name"]

    # NSB levels are encoded in the file names produced by LSTnsb.py
    # (LST_<nsb>_.txt).
    listnsb = np.sort(glob.glob("LST_*_.txt"))
    nsb = [run_list.split('_')[1] for run_list in listnsb]
    print('nsb', nsb)

    for nsblvl in nsb:
        print("***** Generating bashscripts for DL2...")
        DL1_to_2(target_dir, nsblvl)

        print("***** Running lst1_magic_dl1_stereo_to_dl2.py in the DL1 data files...")
        print("Process name: DL2_" + target_dir.split("/")[-2:][1] + str(nsblvl))
        print("To check the jobs submitted to the cluster, type: squeue -n DL2_" + target_dir.split("/")[-2:][1] + str(nsblvl))

        # Below we run the bash scripts to perform the DL1 to DL2 conversion:
        list_of_DL1_to_2_scripts = np.sort(glob.glob(f"DL1_to_DL2_{nsblvl}*.sh"))
        print(list_of_DL1_to_2_scripts)
        launch_jobs = ""
        for n, run in enumerate(list_of_DL1_to_2_scripts):
            if n == 0:
                launch_jobs = f"dl2{n}=$(sbatch --parsable {run})"
            else:
                launch_jobs = launch_jobs + f" && dl2{n}=$(sbatch --parsable --dependency=afterany:$dl2{n-1} {run})"

        # Guard: with no scripts for this NSB level there is nothing to submit
        # (the original referenced an unbound variable here).
        if launch_jobs:
            os.system(launch_jobs)


if __name__ == "__main__":
    main()











136 changes: 136 additions & 0 deletions magicctapipe/scripts/lst1_magic/nsb_scripts/nsb_DL2_to_DL3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
"""
This scripts facilitates the usage of the script
"lst1_magic_dl2_to_dl3.py". This script is
more like a "maneger" that organizes the analysis
process by:
1) Creating the bash scripts for looking for
applying "lst1_magic_dl2_to_dl3.py"
2) Creating the subdirectories for the DL3
data files.
Usage:
$ python DL2_to_DL3.py
"""

import os
import numpy as np
import glob
import yaml
import logging
from pathlib import Path

logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)

def configuration_DL3(ids, target_dir, target_coords):
    """
    Create the configuration file needed for the DL2 to DL3 conversion.

    Writes ``<target_dir>/config_DL3.yaml``; the source name is taken from
    the last component of ``target_dir``.

    Parameters
    ----------
    ids: list
        List of telescope IDs (LST-1..4, MAGIC-I, MAGIC-II)
    target_dir: str
        Path to the working directory
    target_coords: list
        [RA, Dec] of the target in degrees, used when the source name
        cannot be resolved
    """
    target_name = target_dir.split("/")[-1]

    # 'with' guarantees the file is closed even if a write fails.
    with open(target_dir + '/config_DL3.yaml', 'w') as f:
        f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n")
        f.write(f'dl2_to_dl3:\n interpolation_method: "linear" # select "nearest", "linear" or "cubic"\n interpolation_scheme: "cosZdAz" # select "cosZdAz" or "cosZd"\n max_distance: "45. deg"\n source_name: "{target_name}"\n source_ra: "{target_coords[0]} deg" # used when the source name cannot be resolved\n source_dec: "{target_coords[1]} deg" # used when the source name cannot be resolved\n\n')


def DL2_to_DL3(target_dir, nsb):
    """
    Create the bash scripts that run lst1_magic_dl2_to_dl3.py on the real
    DL2 data of one NSB level, one SLURM script per night.

    Parameters
    ----------
    target_dir: str
        Path to the working directory
    nsb: str or float
        NSB level; selects the DL2 input subdirectory and the IRF directory
    """
    # makedirs(exist_ok=True) is race-free, unlike the check-then-mkdir idiom.
    os.makedirs(target_dir + "/DL3", exist_ok=True)

    process_name = "DL3_" + target_dir.split("/")[-2:][1] + str(nsb)
    output = target_dir + "/DL3"
    nights = np.sort(glob.glob(target_dir + f"/DL2/Observations/{nsb}/*"))
    # NOTE(review): hard-coded personal IRF path — should come from the config.
    IRF_dir = "/fefs/aswg/workspace/elisa.visentin/MAGIC_LST_analysis/PG1553_nsb/IRF/" + str(nsb)  # IRF directory

    for night in nights:
        listOfDL2files = np.sort(glob.glob(night + "/Merged/*.h5"))

        # One sequential SLURM script per night, one conversion per DL2 file.
        with open(f'DL3_{nsb}_{night.split("/")[-1]}.sh', 'w') as f:
            f.write('#!/bin/sh\n\n')
            f.write('#SBATCH -p short\n')
            f.write('#SBATCH -J ' + process_name + '\n')
            f.write('#SBATCH --mem=10g\n')
            f.write('#SBATCH -N 1\n\n')
            f.write('ulimit -l unlimited\n')
            f.write('ulimit -s unlimited\n')
            f.write('ulimit -a\n\n')

            for DL2_file in listOfDL2files:
                f.write(f'export LOG={output}/DL3_{DL2_file.split("/")[-1]}.log\n')
                f.write(f'conda run -n magic-lst python lst1_magic_dl2_to_dl3.py --input-file-dl2 {DL2_file} --input-dir-irf {IRF_dir} --output-dir {output} --config-file {target_dir}/config_DL3.yaml >$LOG 2>&1\n\n')


def main():
    """
    Read config_general.yaml, write config_DL3.yaml, build the DL2->DL3 bash
    scripts for every NSB level found in the LST_*_.txt run lists, and submit
    them as a chain of SLURM jobs (each job depends on the previous one).
    """
    with open("config_general.yaml", "rb") as f:  # "rb": binary mode for reading
        config = yaml.safe_load(f)

    telescope_ids = list(config["mc_tel_ids"].values())

    target_dir = str(Path(config["directories"]["workspace_dir"])) + "/" + config["directories"]["target_name"]

    target_coords = [config["general"]["target_RA_deg"], config["general"]["target_Dec_deg"]]

    print("***** Generating file config_DL3.yaml...")
    print("***** This file can be found in ", target_dir)
    configuration_DL3(telescope_ids, target_dir, target_coords)

    # NSB levels are encoded in the file names produced by LSTnsb.py
    # (LST_<nsb>_.txt).
    listnsb = np.sort(glob.glob("LST_*_.txt"))
    nsb = [run_list.split('_')[1] for run_list in listnsb]
    print('nsb', nsb)

    for nsblvl in nsb:
        print("***** Generating bashscripts for DL2-DL3 conversion...")
        DL2_to_DL3(target_dir, nsblvl)

        print("***** Running lst1_magic_dl2_to_dl3.py in the DL2 real data files...")
        print("Process name: DL3_" + target_dir.split("/")[-2:][1] + str(nsblvl))
        print("To check the jobs submitted to the cluster, type: squeue -n DL3_" + target_dir.split("/")[-2:][1] + str(nsblvl))

        # Below we run the bash scripts to perform the DL2 to DL3 conversion:
        list_of_DL2_to_DL3_scripts = np.sort(glob.glob(f"DL3_{nsblvl}_2*.sh"))
        launch_jobs = ""
        for n, run in enumerate(list_of_DL2_to_DL3_scripts):
            if n == 0:
                launch_jobs = f"dl3{n}=$(sbatch --parsable {run})"
            else:
                launch_jobs = launch_jobs + f" && dl3{n}=$(sbatch --parsable --dependency=afterany:$dl3{n-1} {run})"

        # Guard: with no scripts for this NSB level there is nothing to submit
        # (the original referenced an unbound variable here).
        if launch_jobs:
            os.system(launch_jobs)


if __name__ == "__main__":
    main()

Loading

0 comments on commit 2339d42

Please sign in to comment.