Resolve "Allow to keep file content in memory"

Merged Albert Feghaly requested to merge 15-allow-to-keep-file-content-in-memory into master
1 unresolved thread
Files changed: 2 (+136 −67)
@@ -10,10 +10,11 @@ from bs4 import BeautifulSoup
 import sys
 from collections import namedtuple
-from .tools import is_json
+from pyiricdata.tools import is_json
+from pyiricdata.exceptions import IricDataConnectionError

-IDF = namedtuple('IricDataFile', ['name', 'data', 'metadata'])
+IDF = namedtuple('IricDataFile', ['metadata', 'data', 'annotations'])

 class Client:
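Note on the hunk above: IricDataFile now carries the full metadata dict as its first field and renames the last one to annotations, so both positional and attribute access change for existing callers. A minimal sketch of the new shape (field values are illustrative):

    from collections import namedtuple

    IDF = namedtuple('IricDataFile', ['metadata', 'data', 'annotations'])
    idf = IDF(metadata={'filename': 'sample.txt'}, data=b'...', annotations={})
    idf.metadata['filename']  # the filename now lives in the metadata dict, not in idf.name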
@@ -41,13 +42,11 @@ class Client:
         if login.status_code == 200:
             if any(x in login.text for x in ['Erreur', 'Error']):
-                sys.stderr.write('ERROR: Connexion failed -- verify your username and password\n')
-                sys.exit(1)
+                raise IricDataConnectionError('Connection failed -- verify your username and password')
             else:
                 sys.stdout.write('Your connection to IRIC-Data has been established [user=%s]\n' % self.user)
         else:
-            sys.stderr.write('ERROR: Could not initiate connexion with IRIC-Data\n')
-            sys.exit(1)
+            raise IricDataConnectionError('Could not initiate connection with IRIC-Data')
         self.session = session
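Because login failures now raise IricDataConnectionError instead of calling sys.exit, callers can handle them in-process. A minimal sketch, assuming Client is importable from the package root and performs the login in its constructor (the credential arguments are placeholders):

    from pyiricdata import Client
    from pyiricdata.exceptions import IricDataConnectionError

    try:
        client = Client(user='jdoe', password='...')  # hypothetical constructor signature
    except IricDataConnectionError as err:
        print('Could not reach IRIC-Data: %s' % err)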
@@ -71,19 +70,86 @@ class Client:
     """ Fetch Dataset Name and Slug ID available to the user as JSON """
     def get_available_datasets(self):
-        r = self.session.get(os.path.join(self.url, 'secure/my-datasets/list/json/')).json()
+        r = self.session.get(
+            os.path.join(self.url, 'api/v1/my-datasets/list/json/')
+        ).json()
         df = pd.DataFrame(r['data'], columns=['dataset_name', 'dataset_slug'])
         df.index = df.dataset_slug.copy()
         df.index.name = 'ID'
         return df

-    #""" Fetch Dataset Name and Slug ID available for the user as JSON """
-    #def get_available_datafiles(self):
-    #    r = self.session.get(os.path.join(self.url, 'secure/datafiles/list/json/')).json()
-    #    df = pd.DataFrame(r['data'], columns=['dataset_name', 'dataset_slug'])
-    #    df.index = df.dataset_slug
-    #    df.index.name = 'ID'
-    #    return df
-
+    """ Return a DataFrame of the user's files. Filters can be applied on annotation and/or dataset """
+    def get_datafiles_list(self, key_anno=None, value_anno=None, dataset_id=None):
+        if key_anno is None and value_anno is None and dataset_id is None:
+            sys.stderr.write("WARNING: returning all files isn't implemented\n")
+            return None
+        elif key_anno is not None and value_anno is not None:
+            if dataset_id is None:
+                r = self.session.get(
+                    os.path.join(
+                        self.url,
+                        'api/v1/datafiles/list/json/lookup-key-value',
+                        key_anno, value_anno
+                    )
+                ).json()
+            else:
+                r = self.session.get(
+                    os.path.join(
+                        self.url,
+                        'api/v1/datafiles/list/json/lookup-key-value',
+                        key_anno, value_anno, dataset_id
+                    )
+                ).json()
+        elif key_anno is not None:
+            if dataset_id is None:
+                r = self.session.get(
+                    os.path.join(
+                        self.url,
+                        'api/v1/datafiles/list/json/lookup-key',
+                        key_anno
+                    )
+                ).json()
+            else:
+                r = self.session.get(
+                    os.path.join(
+                        self.url,
+                        'api/v1/datafiles/list/json/lookup-key',
+                        key_anno, dataset_id
+                    )
+                ).json()
+        elif value_anno is not None:
+            if dataset_id is None:
+                r = self.session.get(
+                    os.path.join(
+                        self.url,
+                        'api/v1/datafiles/list/json/lookup-value',
+                        value_anno
+                    )
+                ).json()
+            else:
+                r = self.session.get(
+                    os.path.join(
+                        self.url,
+                        'api/v1/datafiles/list/json/lookup-value',
+                        value_anno, dataset_id
+                    )
+                ).json()
+        elif dataset_id is not None:
+            r = self.session.get(
+                os.path.join(
+                    self.url,
+                    'api/v1/datafiles/list/json/lookup-dataset',
+                    dataset_id
+                )
+            ).json()
+        df = pd.DataFrame(r['data']).rename({'id': 'numerical_id'}, axis=1)  # id is internal to iric-data
+        df.index = df.iric_data_id
+        df.index.name = 'ID'
+        ordering = ['filename', 'numerical_id', 'hash']
+        df = df[ordering + [x for x in df.columns if x not in ordering]]
+        return df

     """ Return metadata JSON for a given file_id """
     def get_file_metadata(self, file_id):
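get_datafiles_list dispatches to a different lookup endpoint depending on which filters are supplied; each call returns a DataFrame indexed by iric_data_id with filename, numerical_id and hash as the leading columns. A usage sketch (the annotation key and value are hypothetical):

    client.get_datafiles_list(key_anno='tissue')                      # lookup-key
    client.get_datafiles_list(key_anno='tissue', value_anno='liver')  # lookup-key-value
    client.get_datafiles_list(dataset_id='my-dataset-slug')           # lookup-dataset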
@@ -105,85 +171,90 @@ class Client:
             annotation = None
         return annotation

+    """ Return a connector to the contents of a file for a given file_id """
+    def get_file_data_conn(self, file_id):
+        path = os.path.join(self.url, 'secure/datafiles/download', str(file_id))
+        try:
+            content = self.session.get(path, allow_redirects=True)
+        except Exception:
+            sys.stderr.write('ERROR: File %s does not exist in database\n' % str(file_id))
+            content = None
+        return content

     """ Return DataFrame (file_name, file_id, file_slug, file_hash) for files in a given dataset """
     def get_dataset_filelist(self, dataset_id):
-        df = pd.DataFrame(columns=['file_id', 'file_name', 'file_hash', 'file_slug'])
-        if dataset_id not in self.datasets.index:
-            sys.stderr.write(
-                "ERROR: Dataset '{}' does not exist or you do not have permission to access it.".format(dataset_id) +\
-                " Try Client.datasets for more details.\n")
-            return df
-        files = self.session.get(os.path.join(self.url, 'secure/dataset', dataset_id, 'file-list')).text.split('\n')
-        file_id_list = np.unique([x.split('/')[-1] for x in files])
-        df.file_id = file_id_list
-        if files != ['']:
-            for index, row in df.iterrows():
-                file_meta = self.get_file_metadata(row['file_id'])
-                assert not file_meta is None
-                row['file_name'] = file_meta['filename']
-                row['file_slug'] = file_meta['iric_data_id']
-                row['file_hash'] = file_meta['hash']
-        df.index = df.file_slug.copy()
-        df.index.name = 'ID'
-        return df
+        sys.stderr.write("DEPRECATED: Please use " +
+                         "get_datafiles_list(dataset_id=dataset_id)\n")
+        return self.get_datafiles_list(dataset_id=dataset_id)

     """ Get a subset of the available datasets for which there is a match """
     def filter_datasets(self, term, exact_match=False):
         if exact_match:
             return self.datasets[self.datasets.dataset_name.str.fullmatch(term)]
         else:
             return self.datasets[self.datasets.dataset_name.str.contains(term)]

     """ Get Dataset ID by name """
     def get_dataset_id_by_name(self, name):
         return self.datasets.loc[self.datasets.dataset_name == name, 'dataset_slug'][0]

-    """ Get a subset of the available datasets for which name match a given term """
-    def search_dataset_names(self, term):
-        return self.datasets.loc[self.datasets.dataset_name.str.contains(term),:]

+    """ Get a subset of the user's files whose filename matches a given term """
+    def filter_datafiles(self, term, exact_match=False, **kwargs):  # kwargs are forwarded to get_datafiles_list
+        df = self.get_datafiles_list(**kwargs)
+        print(df)
+        if exact_match:
+            return df[df.filename.str.fullmatch(term)]
+        else:
+            return df[df.filename.str.contains(term)]

     """ Get file content according to file_id """
     def get_file(self, file_id):
-        file_object = self.get_file_metadata(file_id)
-        path = os.path.join(self.url, 'secure/datafiles/download', str(file_id))
         try:
-            file_name = file_object['filename']
-            file_content = self.session.get(path, allow_redirects=True).content
-            file_annotation = self.get_file_metadata(file_id)
-            return IDF(file_name, file_content, file_annotation)
+            file_metadata = self.get_file_metadata(file_id)
+            file_content = self.get_file_data_conn(file_id).content
+            file_annotation = self.get_file_annotation(file_id)
+            return IDF(file_metadata, file_content, file_annotation)
         except TypeError:
             return IDF(None, None, None)

     """ Download file according to file_id """
-    def dwnl_file_content(self, file_id, folder_out='', filename=''):
-        idf = self.get_file(file_id)
-        if idf.data is not None:
-            if folder_out and folder_out[0] == '/':
-                pass
+    def dwnl_file_content(self, file_id, folder_out=None, filename=None):
+        file_meta = self.get_file_metadata(file_id)
+        if file_meta is not None:
+            if folder_out:
+                if folder_out[0] != '/':
+                    folder_out = os.path.join(os.getcwd(), folder_out)
             else:
-                folder_out = os.path.join(os.getcwd(), folder_out)
-            os.makedirs(folder_out, exist_ok=True)
-            filename = idf.name if not filename else filename
+                folder_out = os.getcwd()
+            file_conn = self.get_file_data_conn(file_id)
+            filename = file_meta['filename'] if filename is None else filename
             out_file_path = os.path.join(folder_out, filename)
             if os.path.exists(out_file_path):
                 sys.stderr.write('Warning: File already exists at location %s, skipping.\n' % out_file_path)
             else:
+                os.makedirs(folder_out, exist_ok=True)
                 with open(out_file_path, 'wb') as outfile:
                     print('Downloading %s' % out_file_path)
-                    outfile.write(idf.data)
+                    outfile.write(file_conn.content)

     """ Write file annotations json to disk """
-    def dwnl_file_annotation(self, file_id, folder_out='', filename=''):
-        folder_out = folder_out if folder_out and folder_out[0] == '/' else os.path.join(os.getcwd(), folder_out)
-        os.makedirs(folder_out, exist_ok=True)
+    def dwnl_file_annotation(self, file_id, folder_out=None, filename=None):
         file_meta = self.get_file_metadata(file_id)
-        if not file_meta is None:
+        if file_meta is not None:
+            if folder_out:
+                if folder_out[0] != '/':
+                    folder_out = os.path.join(os.getcwd(), folder_out)
+            else:
+                folder_out = os.getcwd()
             annotations = self.get_file_annotation(file_id)
-            filename = file_meta['filename'] if not filename else filename
+            filename = file_meta['filename'] if filename is None else filename
             out_file_path = os.path.join(folder_out, filename + '.json')
             if os.path.exists(out_file_path):
                 sys.stderr.write('Warning: File already exists at location %s, skipping.\n' % out_file_path)
             else:
+                os.makedirs(folder_out, exist_ok=True)
                 with open(out_file_path, 'w') as outfile:
                     json.dump(annotations, outfile)

     """ Download an entire dataset """
-    def dwnl_dataset(self, dataset_id, folder_out='', datasetname=''):
+    def dwnl_dataset(self, dataset_id, folder_out=None, datasetname=None):
         dataset = self.get_dataset_filelist(dataset_id)
-        datasetname = self.datasets.loc[dataset_id].dataset_name if not datasetname else datasetname
+        datasetname = self.datasets.loc[dataset_id].dataset_name if datasetname is None else datasetname
         for file_id in np.unique(dataset.file_id):
             self.dwnl_file_content(file_id, os.path.join(folder_out, datasetname))
             self.dwnl_file_annotation(file_id, os.path.join(folder_out, datasetname))
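This hunk is the heart of the change: get_file now pulls everything through get_file_data_conn and returns the raw bytes without writing to disk, while the dwnl_* methods reuse the same connector when a file on disk is wanted. A sketch of in-memory use (the file id is a placeholder):

    idf = client.get_file('some-file-id')
    if idf.data is not None:
        text = idf.data.decode('utf-8')   # content stays in memory; nothing is written to disk
        print(idf.metadata['filename'], idf.annotations)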
@@ -198,7 +269,7 @@
             else:
                 sys.stderr.write('Warning: Unspecified filename field and empty directory fields. Exiting...\n')
                 sys.exit(1)
         hierarchy = hierarchy_dirs + [hierarchy_fn]
         dataset = self.get_dataset_filelist(dataset_id)
         for file_id in dataset.file_id:
             annotations = self.get_file_annotation(file_id)
@@ -221,7 +292,7 @@
                     self.dwnl_file_content(file_id, folder_path, filename)
                     self.dwnl_file_annotation(file_id, folder_path, filename)

     #def _check_dataset_annotation(self, dataset_id, annotation=[]):
     #    set([self.get_file_annotation[file_id].keys() for file_id in self.get_dataset_filelist(dataset_id)['file_id']])
@@ -371,8 +442,6 @@ class Client:
            if resp.status_code == 200:
                print('File update successful on {}'.format(file_id))
            else:
-               sys.stderr.write('ERROR: something went wrong during datafiles update\n')
-               sys.exit(2)
+               raise IricDataConnectionError('Something went wrong during datafile update')
        else:
-           sys.stderr.write('FAILED: At least one error has occured, please fix them and try again.\n')
-           sys.exit(1)
+           raise IricDataConnectionError('At least one error has occurred, please investigate and try again.')
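The update path follows the same pattern as the login change: failures surface as IricDataConnectionError rather than killing the interpreter, so batch scripts can log and carry on. A sketch, with the method name assumed from the surrounding hunk rather than taken from the source:

    try:
        client.update_datafile(file_id, annotations)  # hypothetical method name and arguments
    except IricDataConnectionError as err:
        sys.stderr.write('Update failed for %s: %s\n' % (file_id, err))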