Merge pull request #659 from bounswe/workspace-request-bug-fix
Bug Fix
hakanaktas0 authored Dec 22, 2023
2 parents c44d425 + bcab77e commit d8d210d
Showing 2 changed files with 39 additions and 16 deletions.
53 changes: 38 additions & 15 deletions project/backend/api/views.py
@@ -135,7 +135,14 @@ def post(self, request):


def search(request):

res = BasicUserDetailAPI.as_view()(request)
try:
admin = Admin.objects.filter(pk=request.user.basicuser)
is_admin = False
if admin.exists():
is_admin = True
except:
is_admin = False
search = request.GET.get("query")
search_type = request.GET.get("type")
if (search == None or search == "") and search_type != 'random' and search_type != 'trending' and search_type != 'latest' and search_type != 'most_read' and search_type != 'for_you':
@@ -166,12 +173,14 @@ def search(request):
if search_type == 'latest':
all = Node.objects.order_by('-publish_date')[:50]
for node in all:
nodes.append(node.node_id)
if not node.removed_by_admin or is_admin:
nodes.append(node.node_id)

if search_type == 'most_read':
all = Node.objects.order_by('-num_visits')[:50]
for node in all:
nodes.append(node.node_id)
if not node.removed_by_admin or is_admin:
nodes.append(node.node_id)

if search_type == 'trending':
all = Node.objects.order_by('-publish_date')[:250]
@@ -183,7 +192,8 @@ def search(request):
sort.reverse()
sort = sort[:50]
for elem in sort:
nodes.append(elem[0])
if not Node.objects.get(node_id=elem[0]).removed_by_admin or is_admin:
nodes.append(elem[0])


if search_type == 'for_you':
@@ -195,9 +205,11 @@ def search(request):
nodes_q = tag.nodes
related_nodes_q = tag.related_nodes
for node in nodes_q:
non_rel_nodes.append(node.node_id)
if not node.removed_by_admin or is_admin:
non_rel_nodes.append(node.node_id)
for rel_node in related_nodes_q:
rel_nodes.append(rel_node.node_id)
if not rel_node.removed_by_admin or is_admin:
rel_nodes.append(rel_node.node_id)
random.shuffle(non_rel_nodes)
random.shuffle(rel_nodes)
nodes = non_rel_nodes + rel_nodes
@@ -213,21 +225,24 @@ def search(request):
if Contributor.objects.filter(user_id=e.id).count() != 0:
cont_nodes = Contributor.objects.get(user_id=e.id).NodeContributors.all()
for node in cont_nodes:
nodes.append(node.node_id)
if not node.removed_by_admin or is_admin:
nodes.append(node.node_id)

for e in res_surname:
if Contributor.objects.filter(user_id=e.id).count() != 0:
cont_nodes = Contributor.objects.get(user_id=e.id).NodeContributors.all()
for node in cont_nodes:
nodes.append(node.node_id)
if not node.removed_by_admin or is_admin:
nodes.append(node.node_id)

contributors = []
if search_type == 'node' or search_type == 'all':
search_elements = search.split()
for el in search_elements:
res = Node.objects.annotate(search=SearchVector("node_title")).filter(node_title__icontains=el)
for e in res:
nodes.append(e.node_id)
if not e.removed_by_admin or is_admin:
nodes.append(e.node_id)

if search_type == 'author' or search_type == 'all': # TODO This method is too inefficient
search_elements = search.split()
@@ -250,9 +265,11 @@ def search(request):
nodes_q = tag.nodes
related_nodes_q = tag.related_nodes
for node in nodes_q:
nodes.append(node.node_id)
if not node.removed_by_admin or is_admin:
nodes.append(node.node_id)
for rel_node in related_nodes_q:
nodes.append(rel_node.node_id)
if not node.removed_by_admin or is_admin:
nodes.append(rel_node.node_id)

if search_type == 'random':
count = Node.objects.count()
@@ -267,8 +284,9 @@ def search(request):
if ran not in prev:
prev.append(ran)
random_node = Node.objects.all()[ran]
nodes.append(random_node.node_id)
i += 1
if not random_node.removed_by_admin or is_admin:
nodes.append(random_node.node_id)
i += 1
contributors = list(set(contributors))
if not (search_type == 'latest' or search_type == 'most_read' or search_type == 'for_you' or search_type == 'trending'):
nodes = list(set(nodes))
@@ -286,7 +304,7 @@ def search(request):
user = User.objects.get(id=cont.user_id)
authors.append({'name': user.first_name,
'surname': user.last_name, 'username': user.username, 'id': cont.id})
node_infos.append({'id': node_id, 'title': node.node_title, 'date': node.publish_date, 'authors': authors, 'num_visits' : node.num_visits})
node_infos.append({'id': node_id, 'title': node.node_title, 'date': node.publish_date, 'authors': authors, 'num_visits' : node.num_visits,'removed_by_admin':node.removed_by_admin})
return JsonResponse({'nodes' : node_infos , 'authors' :res_authors },status=200)

def get_profile(request):
@@ -447,7 +465,7 @@ def get_workspaces(request):
pending_review = []
review_workspace_list = []
if Reviewer.objects.filter(pk=request.user.basicuser.pk).exists():
reviwer = Reviewer.objects.filter(id=json.loads(res.content.decode())['basic_user_id'])
reviwer = Reviewer.objects.filter(id=json.loads(res.content.decode())['basic_user_id'])[0]
for workspace in reviwer.review_workspaces.all():
for req in ReviewRequest.objects.filter(workspace=workspace):
if req.status == 'A' and req.response == 'P':
@@ -480,9 +498,14 @@ def get_workspace_from_id(request):
if not IsContributor().has_permission(request, get_workspace_from_id):
return JsonResponse({'message': 'User is not a Contributor'}, status=403)
reviewer = Reviewer.objects.filter(pk=request.user.basicuser)
cont = Contributor.objects.get(pk=request.user.basicuser)
flag = True
workspace = workspace[0]
if reviewer.exists():
for req in ReviewRequest.objects.filter(receiver=cont):
if req.workspace.workspace_id == workspace.workspace_id and req.status == 'P':
flag = False
request_id = req.id
if workspace in reviewer[0].review_workspaces.all():
cont = Contributor.objects.filter(pk=request.user.basicuser)[0]
requests = ReviewRequest.objects.filter(workspace=workspace)
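
Every branch of search() above applies the same visibility guard: a node id is collected only when removed_by_admin is false or the requester is an admin. The sketch below pulls that repeated pattern into two helpers; it is an illustration of the guard, not code from this commit, and the helper names and the database.models import path are assumptions.

    from database.models import Admin  # assumed import path for the models used in this diff

    def is_request_admin(request):
        # True when the requesting BasicUser is also registered as an Admin;
        # anonymous users or users without a BasicUser profile are never admins.
        try:
            return Admin.objects.filter(pk=request.user.basicuser).exists()
        except Exception:
            return False

    def visible_node_ids(nodes, is_admin):
        # Keep only nodes a non-admin may see; admins also see removed nodes.
        return [
            node.node_id
            for node in nodes
            if not node.removed_by_admin or is_admin
        ]

Each branch (latest, most_read, trending, for_you, ...) could then call visible_node_ids(...) instead of repeating the inline check, keeping the admin rule in one place.
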
2 changes: 1 addition & 1 deletion project/backend/database/serializers.py
@@ -204,7 +204,7 @@ def create(self, validated_data):
class NodeViewProofSerializer(serializers.ModelSerializer):
class Meta:
model = Proof
fields = ['proof_content', 'publish_date','contributors']
fields = ['proof_content', 'publish_date','contributors','is_disproof']

class NodeViewTheoremSerializer(serializers.ModelSerializer):
class Meta:
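
For context, the serializer change simply exposes the proof's is_disproof flag wherever NodeViewProofSerializer is used. A minimal sketch of the resulting serializer, assuming the Proof model and a database.models import path (the example output values are invented for illustration):

    from rest_framework import serializers
    from database.models import Proof  # assumed import path

    class NodeViewProofSerializer(serializers.ModelSerializer):
        class Meta:
            model = Proof
            fields = ['proof_content', 'publish_date', 'contributors', 'is_disproof']

    # Serialized output now carries the flag, e.g.
    # {'proof_content': '...', 'publish_date': '2023-12-22',
    #  'contributors': [3, 7], 'is_disproof': False}
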
