diff --git a/add_files_to_obs.sh b/add_files_to_obs.sh
new file mode 100644
index 0000000000000000000000000000000000000000..bf769dfe4be087547b0568ed1e2171587264a5e8
--- /dev/null
+++ b/add_files_to_obs.sh
@@ -0,0 +1,6 @@
+# first arg is the dataset name, the second a file with a list of filenames in it
+rucio add-dataset lofar:$1
+for fname in `cat $2`
+do
+rucio attach lofar:$1 lofar:$fname
+done
diff --git a/add_obs_to_container.sh b/add_obs_to_container.sh
new file mode 100644
index 0000000000000000000000000000000000000000..afcccfe89353f78de333449f9fb3951ce9ec33db
--- /dev/null
+++ b/add_obs_to_container.sh
@@ -0,0 +1,6 @@
+# first arg is the container name, the second a file with a list of observation datasets for the container
+rucio add-container lofar:$1
+for oname in `cat $2`
+do
+rucio attach lofar:$1 lofar:$oname
+done
diff --git a/create_movescript.py b/create_movescript.py
index 984aa44bb3d80c44847f72cb0829df6b38f06c9f..d91e86bc4cb1c4b84d713f8fc9c3a2bd9c673004 100755
--- a/create_movescript.py
+++ b/create_movescript.py
@@ -17,7 +17,7 @@ def data_parser(filedata, scope='lofar'):
         name = os.path.basename(pth)
         checkline = filedata[idx+6]
         checksum = checkline.split(":")[1].strip()
-        datafiles.append({'name':name, 'bytes': nbytes, 'adler32':checksum, 'meta':meta})
+        datafiles.append({'scope':scope, 'name':name, 'bytes': nbytes, 'adler32':checksum, 'meta':meta})
         filepaths.append(pth)
     return filepaths, datafiles
 
@@ -34,16 +34,14 @@ srm_root_host = "srm://srm.grid.sara.nl"
 rucio_rse_rootdir = "srm://srm.grid.sara.nl/pnfs/grid.sara.nl/data/escape/disk/rucio/sara_dcache"
 rdt = protocol.RSEDeterministicTranslation()
 move_commands = list()
-topaths = list()
 for frompath in filepaths:
     fname = os.path.basename(frompath)
     tosubpath = rdt.path(scope,fname)
     from_address = "/".join([srm_root_host, frompath])
     to_address = "/".join([rucio_rse_rootdir, tosubpath])
-    topaths.append(tosubpath[len(scope)+1:]) # strip off scope and slash
     move_commands.append("srmmv {src} {tgt}\n".format(src=from_address, tgt=to_address))
 
 with open("data_movescript.sh", 'w') as dms:
     dms.writelines(move_commands)
 with open("datapaths.pck","wb") as path_pickle:
-    pickle.dump(topaths, path_pickle)
+    pickle.dump(filelist_data, path_pickle)
diff --git a/register_movedfiles.py b/register_movedfiles.py
index 673509a16254b1258f40e9e472c1147ccaf3953b..7a9624177bbc06b193d3a0e81b849e7275a9c010 100755
--- a/register_movedfiles.py
+++ b/register_movedfiles.py
@@ -4,9 +4,15 @@ import cPickle as pickle
+from rucio.client import replicaclient  # needed for the ReplicaClient used below
 
 # step 4: Add replicas for files using rucio.client.replicaclient the ReplicaClient class (add_replicas)
 with open("datapaths.pck", "rb") as pathfile:
-    data_paths = pickle.load(pathfile)
-    print(data_paths[2])
+    file_details = pickle.load(pathfile)
 
-# step 4: Add dataset for each observation, add the files to the dataset
+
+rcl = replicaclient.ReplicaClient()
+RSE = "SARA-DCACHE"
+
+rcl.add_replicas(RSE, file_details)
+# ideally we should catch issues here as well...
+
 
 # step 5: Add container and add the cal/tgt to the container together
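
For reference, a minimal usage sketch of the two new helper scripts, run after the data have been moved and the replicas registered; the dataset name, container name, and list files below are hypothetical placeholders, not files in the repository.

    # create dataset lofar:L123456 and attach every filename listed in files_L123456.txt
    sh add_files_to_obs.sh L123456 files_L123456.txt
    # create container lofar:LC0_commissioning and attach each observation dataset listed in obs_list.txt
    sh add_obs_to_container.sh LC0_commissioning obs_list.txt

Each list file is expected to hold one name per line, in the lofar scope used throughout these scripts.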