import hashlib
import os

import graphene
from django.contrib.auth import get_user_model
from django.db import connection
from django.utils import timezone
from graphene_django.forms.mutation import DjangoModelFormMutation
from graphene_django.types import DjangoObjectType
from graphene_django.views import GraphQLView
from graphql.error import GraphQLError

from portal.forms import DataSetForm
from portal.models import DataFile, DataSet, Lab, Profile, ShareGroup
from portal.views import TokenLoginMixin


# Protect the GraphQL API page
class PrivateGraphQLView(TokenLoginMixin, GraphQLView):
    pass


# Create a GraphQL type for the various models
class DataFileType(DjangoObjectType):
    class Meta:
        model = DataFile
        fields = "__all__"


class DataSetType(DjangoObjectType):
    class Meta:
        model = DataSet
        fields = "__all__"


class ProfileType(DjangoObjectType):
    class Meta:
        model = Profile
        fields = ['id', 'accountname', 'unix_username', 'user', 'labs']


class UserType(DjangoObjectType):
    class Meta:
        model = get_user_model()
        fields = ['id', 'username', 'first_name', 'last_name', 'email', 'is_staff']


class LabType(DjangoObjectType):
    class Meta:
        model = Lab
        fields = ['id', 'name', 'pi', 'data_managers', 'institution']


class ShareGroupType(DjangoObjectType):
    class Meta:
        model = ShareGroup
        fields = ['id', 'name', 'profiles']


# Create the Query type
class Query(graphene.ObjectType):
    datafile = graphene.Field(DataFileType, id=graphene.String(), dbid=graphene.ID())
    dataset = graphene.Field(DataSetType, id=graphene.String(), dbid=graphene.ID())
    datafiles = graphene.List(
        DataFileType,
        key=graphene.String(),
        value=graphene.String(),
        key_values=graphene.JSONString(),
        keys=graphene.JSONString(),
        values=graphene.JSONString(),
        dataset=graphene.String(),
        file_hash=graphene.String()
    )
    datasets = graphene.List(DataSetType)
    lab = graphene.Field(LabType, name=graphene.String(), dbid=graphene.ID())
    labs = graphene.List(
        LabType,
        in_name_list=graphene.List(graphene.NonNull(graphene.String))
    )
    profiles = graphene.List(
        ProfileType,
        in_email_list=graphene.List(graphene.NonNull(graphene.String))
    )
    sharegroups = graphene.List(
        ShareGroupType,
        in_name_list=graphene.List(graphene.NonNull(graphene.String))
    )

    def resolve_datafile(self, info, **kwargs):
        id = kwargs.get('id')
        dbid = kwargs.get('dbid')
        qs = DataFile.objects.accessible_to_profile(info.context.user.profile)
        if id is not None:
            return qs.filter(iric_data_id=id).first()
        elif dbid is not None:
            return qs.filter(pk=dbid).first()

    def resolve_dataset(self, info, **kwargs):
        id = kwargs.get('id')
        dbid = kwargs.get('dbid')
        qs = DataSet.objects.accessible_to_profile(info.context.user.profile)
        if id is not None:
            return qs.filter(iric_data_id=id).first()
        elif dbid is not None:
            return qs.filter(pk=dbid).first()
        return None

    def resolve_datafiles(self, info, **kwargs):
        qs = DataFile.objects.accessible_to_profile(info.context.user.profile)
        key = kwargs.get('key', None)
        value = kwargs.get('value', None)
        key_values = kwargs.get('key_values', None)
        keys = kwargs.get('keys', None)
        values = kwargs.get('values', None)
        dataset = kwargs.get('dataset', None)
        file_hash = kwargs.get('file_hash', None)
        if type(key_values) is not dict:
            key_values = {}
        if type(keys) is not list:
            keys = []
        if type(values) is not list:
            values = []
        if key and value:
            qs = qs.filter(annotations__contains={key: value})
        elif key:
            qs = qs.filter(annotations__has_key=key)
        elif value:
            qs = qs.filter(annotations__values__icontains=value)
        for k, v in key_values.items():
            qs = qs.filter(annotations__contains={k: v})
        for k in keys:
            qs = qs.filter(annotations__has_key=k)
        for v in values:
            qs = qs.filter(annotations__values__icontains=v)
        if dataset:
            qs = qs.filter(datasets__iric_data_id=dataset)
        if file_hash:
            iric_data_file_hash = os.path.join(file_hash[:3], file_hash)
            qs = qs.filter(file=iric_data_file_hash)
        return qs.distinct()

    def resolve_datasets(self, info, **kwargs):
        return DataSet.objects.accessible_to_profile(info.context.user.profile)

    def resolve_lab(self, info, **kwargs):
        name = kwargs.get('name')
        dbid = kwargs.get('dbid')
        if name is not None:
            return Lab.objects.get(name=name)
        elif dbid is not None:
            return Lab.objects.get(pk=dbid)

    def resolve_labs(self, info, **kwargs):
        name_list = kwargs.get('in_name_list')
        if name_list is not None:
            return Lab.objects.filter(name__in=name_list)

    def resolve_profiles(self, info, **kwargs):
        email_list = kwargs.get('in_email_list')
        if email_list is not None:
            return Profile.objects.filter(user__email__in=email_list)

    def resolve_sharegroups(self, info, **kwargs):
        name_list = kwargs.get('in_name_list')
        if name_list is not None:
            return ShareGroup.objects.filter(name__in=name_list)
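
# Illustrative query (not part of the module): with auto_camelcase=False set on the
# schema at the bottom of this file, argument and field names are used exactly as
# declared above. The annotation key/value and dataset id here are made-up placeholders:
#
#     query {
#         datafiles(key: "species", value: "mouse", dataset: "DS1A2B3C4D") {
#             iric_data_id
#             filename
#         }
#     }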


class DataSetMutation(DjangoModelFormMutation):
    """Enable the creation or modification of a DataSet.

    Makes use of the DataSetForm, so all fields exposed by that form are available.
    """

    class Meta:
        form_class = DataSetForm
        return_field_name = 'dataset'

    @classmethod
    def perform_mutate(cls, form, info):
        obj = form.save(commit=False)
        if not obj.pk:
            obj.read_only = True
            obj.created_by = info.context.user.profile
        obj.last_update_by = info.context.user.profile
        obj.save()
        return super().perform_mutate(form, info)
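
# Illustrative mutation (not part of the module): the input type is generated from
# DataSetForm (not shown here) plus an optional "id" for updating an existing record;
# the "name" field below is hypothetical -- substitute the real DataSetForm fields:
#
#     mutation {
#         dataset(input: {name: "My dataset"}) {
#             dataset {
#                 iric_data_id
#             }
#         }
#     }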


class DataFileAttachMutation(graphene.Mutation):
    attached_datafile = graphene.Field(DataFileType)

    class Arguments:
        # The input arguments for this mutation
        file_hash = graphene.String(required=True)
        filename = graphene.String(required=True)
        target_user_email = graphene.String(required=True)
        annotations = graphene.String()

    @classmethod
    def mutate(cls, root, info, file_hash, filename, target_user_email, annotations=None):
        # Check that the calling user is staff
        if not info.context.user.is_staff:
            raise GraphQLError("You are not allowed to use this API function")
        # Transform file_hash
        iric_data_file_hash = os.path.join(file_hash[:3], file_hash)
        target_profile = Profile.objects.get(user__email=target_user_email)
        if not DataFile.objects.filter(file=iric_data_file_hash).exists():
            raise GraphQLError("No datafile associated with the provided hash")
        with connection.cursor() as cursor:
            cursor.execute(
                "INSERT INTO portal_datafile(file, filename, uploaded_by_id, upload_timestamp, annotations) VALUES (%s, %s, %s, %s, %s)",
                [
                    iric_data_file_hash,
                    filename,
                    target_profile.id,
                    timezone.now(),
                    annotations
                ]
            )
        df = DataFile.objects.latest('id')
        df.iric_data_id = "DF{}".format(hashlib.md5(str(df.id).encode()).hexdigest()[:8]).upper()
        df.save()
        return DataFileAttachMutation(attached_datafile=df)
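
# Illustrative mutation (not part of the module): a staff user attaches an
# already-uploaded file to another user's profile by its hash. The hash, filename and
# e-mail below are placeholders:
#
#     mutation {
#         attach_datafile_by_hash(file_hash: "0a1b2c3d4e5f",
#                                 filename: "reads.fastq.gz",
#                                 target_user_email: "someone@example.org") {
#             attached_datafile {
#                 iric_data_id
#             }
#         }
#     }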


class Mutation(graphene.ObjectType):
    dataset = DataSetMutation.Field()
    attach_datafile_by_hash = DataFileAttachMutation.Field()


schema = graphene.Schema(query=Query, mutation=Mutation, auto_camelcase=False)
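
# A minimal wiring sketch (assumption: this module is importable as portal.schema and
# the API is served at /graphql/ -- adjust to the project's URLconf):
#
#     from django.urls import path
#     from portal.schema import PrivateGraphQLView, schema
#
#     urlpatterns = [
#         path("graphql/", PrivateGraphQLView.as_view(graphiql=True, schema=schema)),
#     ]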