Mercurial > repos > damion > versioned_data
comparison data_store_utils.py @ 1:5c5027485f7d draft
Uploaded correct file
author | damion |
---|---|
date | Sun, 09 Aug 2015 16:07:50 -0400 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
0:d31a1bd74e63 | 1:5c5027485f7d |
---|---|
1 | |
def version_cache_setup(dataset_id, data_file_cache_folder, cacheable_dataset):
    """ UNUSED: Idea was to enable caching of workflow products outside of galaxy for use by others.
    CONSIDER METACODE. NOT INTEGRATED, NOT TESTED.

    Ensures a local cached copy of a galaxy dataset exists: downloads the
    dataset file into data_file_cache_folder (named by dataset_id) on a cache
    miss, then mirrors the dataset's companion "<name>_files" subfolder (if
    galaxy created one) into a "files" subfolder of the cache.

    dataset_id -- galaxy dataset id; also used as the cached file's name.
    data_file_cache_folder -- local folder holding the cached copy; wiped and
        rebuilt on a cache miss so stale partial downloads cannot linger.
    cacheable_dataset -- galaxy dataset record; its 'file_name' value is the
        dataset's path on the galaxy server.
    """
    data_file_cache_name = os.path.join(data_file_cache_folder, dataset_id)  # e.g. 'blastdb.txt'
    if not os.path.isfile(data_file_cache_name):
        # Cache miss: start from a clean cache folder.
        if os.path.isdir(data_file_cache_folder):
            shutil.rmtree(data_file_cache_folder)
        os.makedirs(data_file_cache_folder)
        # use_default_filename=False means we're supplying the filename.
        # Default maxwait (=12000) is a wait of about 3 hours.
        gi.datasets.download_dataset(dataset_id, file_path=data_file_cache_name,
            use_default_filename=False, wait_for_completion=True)

    # Generically, any dataset might have subfolders - to check we have to
    # see if galaxy dataset file path has contents at _files suffix.
    # Find dataset_id in version retrieval history datasets, and get its folder path, and copy _files over...
    # NOTE(review): [0:-4] assumes 'file_name' ends in a 4-char extension
    # such as '.dat' -- TODO confirm against galaxy's dataset layout.
    galaxy_dataset_folder = cacheable_dataset['file_name'][0:-4] + '_files'
    files_cache_folder = os.path.join(data_file_cache_folder, 'files')
    # HACK: brief pause, presumably to let the download settle on disk -- confirm necessity.
    time.sleep(2)
    if os.path.isdir(galaxy_dataset_folder) and not os.path.isdir(files_cache_folder):
        print('Copying ' + galaxy_dataset_folder + ' to ' + data_file_cache_folder)
        # Copy program makes target folder.
        shutil.copytree(galaxy_dataset_folder, files_cache_folder)  # symlinks=False, ignore=None