Commit 640fbe59 authored by AJRubio-Montero

Stable release of onedataSim, tested during the EOSC-SYNERGY Simulation Challenge on 21-27 May 2021

parent 4bd10d3a
@@ -70,13 +70,18 @@ RUN yum -y install acl attr
 # xattr (this is python2 but I had found the command only in python2)
 RUN yum -y install python2-pip python-devel libffi-devel
-# sometimes pip's upgrade fails
-#RUN pip install --upgrade pip
+# sometimes pip's upgrade fails and doesn't find the "typing" module
+# RUN pip install --upgrade pip
+# RUN pip install typing
+RUN pip install cffi
 RUN pip install xattr

 #python3 and libraries for Lago processing with onedata
 RUN yum -y install python3 python36-pyxattr

+# utils for debugging
+RUN yum -y install screen nano
+
 WORKDIR /opt/corsika-77402-lago/run

 #ENTRYPOINT /opt/arti/sims/do_datahub.sh
 CMD bash
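
The pip-installed xattr module (and python36-pyxattr for Python 3) is used to read the extended attributes through which the oneclient mount presumably exposes Onedata metadata; installing cffi first works around the xattr build failing under the old python2 pip, which does not pull the dependency in reliably. A minimal sketch of reading those attributes, assuming a Linux host and using the Python 3 stdlib calls that pyxattr wraps (the file path is hypothetical):

import os

# hypothetical file below the oneclient mount point
path = '/mnt/datahub.egi.eu/LAGOsim/S0_test/.metadata/.S0_test.jsonld'

# os.listxattr/os.getxattr are stdlib on Linux (Python 3.3+)
for name in os.listxattr(path):
    print(name, os.getxattr(path, name)[:80])
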
-Subproject commit ae6d90f66a978e0ac3e874ac887229d8801e7617
+Subproject commit d8f8caa280550078fad90fc2dfc9b1c4c06ca543
@@ -138,10 +138,10 @@ def get_sys_args():
     # echo -e " -s <site> : \
     #           Location (several options)"
     parser.add_argument('-s', dest='s', required=True,
-                        choices=[ "QUIE","and","asu","ber","bga","brc","bue",
-                                  "cha","chia","cpv","cuz","gua","kna","lim",
-                                  "lpb","lsc","mapi","mge","pam","sac","sao",
-                                  "sawb","serb","sng","tac","tuc","vcp" ],
+                        # choices=[ "QUIE","and","asu","ber","bga","brc","bue",
+                        #           "cha","chia","cpv","cuz","gua","kna","lim",
+                        #           "lpb","lsc","mapi","mge","pam","sac","sao",
+                        #           "sawb","serb","sng","tac","tuc","vcp" ],
                         help='Predefined LAGO site')
     # echo -e " -j <procs> : \
     #           Number of processors to use"
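
With the choices list commented out, argparse no longer rejects unknown site codes at parse time: -s now accepts any string, so new LAGO sites can be simulated without editing this file. A standalone sketch of the behavioral difference (illustrative only, not the project's full parser):

import argparse

parser = argparse.ArgumentParser()
# previously, choices=[ "QUIE","and",... ] made argparse exit with an
# error for any site not in the list; without it, any value is accepted
parser.add_argument('-s', dest='s', required=True,
                    help='Predefined LAGO site')

print(parser.parse_args(['-s', 'newsite']).s)  # accepted: 'newsite'
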
@@ -95,7 +95,8 @@ def get_first_catalog_metadata_json(catcodename, arti_params_dict):
     return json.loads(s)

-def get_catalog_metadata_activity(startdate, enddate, arti_params_dict):
+def get_catalog_metadata_activity(startdate, enddate, catcodename,
+                                  arti_params_dict):
     with open(onedataSimPath+'/json_tpl/catalog_corsika_activity.json',
               'r') as file1:
@@ -218,6 +219,10 @@ def _consumer_onedata_mv(onedata_path):
             print(id + ': copy queued again')
             q_onedata.put(md)
             time.sleep(2)
+            # we have to subtract 1 from the queue length because q.put
+            # always adds 1, but really we are re-queueing and the
+            # size remains the same
+            q_onedata.task_done()

 def _run_check_and_copy_results(catcodename, filecode, task, onedata_path,
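
The added q_onedata.task_done() keeps Queue.join() from hanging when a copy is retried: join() returns only when the unfinished-task counter reaches zero, every put() increments that counter, and every task_done() decrements it, so re-queueing a failed item without acknowledging the one just taken would leave the counter permanently one too high. A self-contained sketch of this accounting rule (illustrative only, not the project's worker):

import queue
import threading

q = queue.Queue()

def worker():
    while True:
        item, attempts = q.get()
        if attempts < 1:
            q.put((item, attempts + 1))  # retry: counter goes up by 1...
        q.task_done()  # ...so acknowledge the item just taken, or join() hangs

threading.Thread(target=worker, daemon=True).start()
q.put(('copy-job', 0))
q.join()  # returns once the retried item has been processed
print('all copies done')
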
@@ -266,7 +271,27 @@ def _producer(catcodename, arti_params):
     # clean a possible previous simulation
     if os.path.exists(catcodename):
         shutil.rmtree(catcodename, ignore_errors=True)

+    # PATCH: correct the creation of tasks, which is based on (-j) in ARTI.
+    # ARTI tries to fit the number of tasks (NRUN) to the number of procs
+    # to be correct in terms of physics, but it was not implemented to fit
+    # the output sizes to the flux time (arti_params[t])
+    params_aux_flux = arti_params[arti_params.find("-t")+3:]
+    flux_time = int(params_aux_flux[:params_aux_flux.find("-")])
+    params_aux = arti_params[arti_params.find("-j")+3:]
+    old_j = int(params_aux[:params_aux.find("-")])
+    aux_j = flux_time // 900
+    if aux_j == 0:
+        aux_j = 1
+    if aux_j > 12:
+        aux_j = 12
+    arti_params = (arti_params[:arti_params.find("-j")] + "-j " +
+                   str(aux_j) + " " + params_aux[params_aux.find("-"):])
+    print("PATCH: changed -j from " + str(old_j) + " to " + str(aux_j) +
+          " to generate tasks")
+
     cmd = 'do_sims.sh ' + arti_params
     _run_Popen_interactive(cmd)
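
The patch derives the task count from the flux time (-t, in seconds): one task per 900 s, clamped to the range 1..12, so e.g. -t 3600 becomes -j 4. The same rule as a small standalone helper (the function name is hypothetical, not from the codebase):

def tasks_for_flux_time(flux_time_seconds):
    """One task per 900 s of flux time, clamped to [1, 12]."""
    return min(max(flux_time_seconds // 900, 1), 12)

assert tasks_for_flux_time(100) == 1      # -t 100   -> -j 1
assert tasks_for_flux_time(3600) == 4     # -t 3600  -> -j 4
assert tasks_for_flux_time(36000) == 12   # -t 36000 -> -j 12 (cap)
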
@@ -276,8 +301,9 @@ def _producer(catcodename, arti_params):
     _run_Popen(cmd)

     # WARNING, I HAD TO PATCH rain.pl FOR AVOID .long files !!!
-    cmd = "sed 's/\$llongi /F /' rain.pl -i"
-    _run_Popen(cmd)
+    # 20210519 not necessary since arti@d8f8caa
+    # cmd = "sed 's/\$llongi /F /' rain.pl -i"
+    # _run_Popen(cmd)

     # -g only creates .input's
     # cmd="sed 's/\.\/rain.pl/echo \$i: \.\/rain.pl -g /' go-*.sh -i"
@@ -314,13 +340,17 @@ def _consumer(catcodename, onedata_path, arti_params_dict):
             q.task_done()
         except Exception as inst:
             q.put((filecode, task))
+            # we have to subtract 1 from the queue length because q.put
+            # always adds 1, but really we are re-queueing and the
+            # size remains the same
+            q.task_done()

 # ------------ main stuff ---------
 (arti_params, arti_params_dict, arti_params_json_md) = get_sys_args()
 catcodename = arti_params_dict["p"]
-# onedata_path = '/mnt/datahub.egi.eu/LAGOsim'
 onedata_path = '/mnt/datahub.egi.eu/LAGOsim'
+#onedata_path = '/mnt/datahub.egi.eu/test8/LAGOSIM'
 catalog_path = onedata_path + '/' + catcodename
 print(arti_params, arti_params_dict, arti_params_json_md)
@@ -375,6 +405,7 @@ md['dataset'] = ["/" + catcodename + "/" + s for s in
 md = _add_json(md, json.loads(get_catalog_metadata_activity(main_start_date,
                                                             _xsd_dateTime(),
+                                                            catcodename,
                                                             arti_params_dict)))
 _write_file(catalog_path + '/.metadata/.' + catcodename + '.jsonld',