application.py

import json
import requests
import crochet
import sqlite
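
# NOTE: `sqlite` is a project-local helper module wrapping result storage
# (it provides the SQLOBJ class used below), not the standard library's
# sqlite3.

# crochet.setup() starts the Twisted reactor in a background thread, which is
# what lets Scrapy crawls run alongside the blocking Flask server.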
crochet.setup()
from flask import Flask, render_template, jsonify, request, redirect, url_for
from scrapy import signals
from scrapy.crawler import CrawlerRunner
from scrapy.signalmanager import dispatcher
import time
from uuid import uuid4
from multiprocessing import Process
from ERP_Scraper.ERP_Scraper.spiders.internals import ERPObj
from ERP_Scraper.ERP_Scraper.spiders.endexam import ERP_END
from ERP_Scraper.ERP_Scraper.spiders.test2 import SP

application = Flask(__name__)
crawl_runner = CrawlerRunner()

dets = {}
sqlobj = sqlite.SQLOBJ()
reqcount = 0
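
# A minimal sketch of the interface this file assumes from the local `sqlite`
# helper module (method names inferred from the calls below; the real module
# may differ):
#
#     import json
#     import sqlite3
#
#     class SQLOBJ:
#         def __init__(self, path="results.db"):
#             # check_same_thread=False: insert() runs on the reactor thread
#             self.conn = sqlite3.connect(path, check_same_thread=False)
#             self.conn.execute("CREATE TABLE IF NOT EXISTS results "
#                               "(uid TEXT, comps TEXT, type TEXT)")
#
#         def insert(self, uid, comps, type_):
#             self.conn.execute("INSERT INTO results VALUES (?, ?, ?)",
#                               (uid, json.dumps(comps), type_))
#             self.conn.commit()
#
#         def get(self, uid, top):  # `top` semantics live in the real helper
#             rows = self.conn.execute("SELECT comps, type FROM results "
#                                      "WHERE uid = ?", (uid,)).fetchall()
#             return [{"comps": json.loads(c), "type": t} for c, t in rows]
#
#         def delete(self, uid):
#             self.conn.execute("DELETE FROM results WHERE uid = ?", (uid,))
#             self.conn.commit()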
@application.route("/getdata", methods=['POST'])
def getdata():
data = request.get_json()
session_id = str(uuid4())
data["session_id"]=session_id
    requests.post(request.host_url[:-1] + url_for("scrape"), json=data)
    # print(dets[session_id], session_id)
    return jsonify({"session_id": session_id})
@application.route("/getstats/<uid>")
def stats(uid):
global reqcount
if uid=='2d2ff1a8-4ab0-4201-a5e6-4d448ad032d9':
return jsonify({
"status":"Authorized",
"requests":reqcount
})
else:
return jsonify({
"status":"Not authorized",
})
@application.route("/scrape", methods=["POST"])
def scrape():
global reqcount
reqcount+=1
data = request.get_json()
session_id = data.get('session_id')
method = data.get("method")
# p=Process(target=scrape_with_crochet, args=(data, ))
# p.start()
scrape_with_crochet(data)
time.sleep(5)
return jsonify({"name":"hey"})
# return jsonify(output_data)
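
# A hedged alternative to the fixed time.sleep(5) in scrape(): a function
# decorated with @crochet.run_in_reactor returns an EventualResult, so the
# route could block on the crawl itself (sketch; assumes the crawl usually
# finishes within the timeout):
#
#     eventual = scrape_with_crochet(data)
#     try:
#         eventual.wait(timeout=5)
#     except crochet.TimeoutError:
#         pass  # crawl still running; items land in the DB as they arrive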
@application.route("/results/<var>", methods=["GET"])
def results(var):
uid, top = var.split("|||")
res = sqlobj.get(uid, top)
sqlobj.delete(uid)
return jsonify(res)


@crochet.run_in_reactor
def scrape_with_crochet(data):
    # print("Session", data)
    dispatcher.connect(_crawler_result, signal=signals.item_scraped)
    eventual = None
    if data["method"] == "internals":
        eventual = crawl_runner.crawl(ERPObj, dets=data.get('cookies', {}),
                                      uid=data.get("session_id"),
                                      year=data.get("year"),
                                      sem=data.get("sem"))
    elif data["method"] == "endsem":
        eventual = crawl_runner.crawl(ERP_END, dets=data.get('cookies', {}),
                                      uid=data.get("session_id"),
                                      year=data.get("year"),
                                      sem=data.get("sem"))
    return eventual
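
# Note: dispatcher.connect inside scrape_with_crochet re-registers
# _crawler_result on every request. PyDispatcher drops an identical
# (receiver, signal) registration before re-adding it, so item deliveries
# are not duplicated, but connecting once at import time would make that
# explicit.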


def _crawler_result(item, response, spider):
    item = dict(item)
    # print(item)
    # print("item", item)
    sqlobj.insert(item["uid"], item["comps"], item["type"])


if __name__ == "__main__":
    application.run(debug=True)
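
# Hypothetical end-to-end client flow (sketch; payload keys are inferred from
# the handlers above, and the meaning of the `top` value after "|||" comes
# from the local sqlite helper):
#
#     import time
#     import requests
#
#     BASE = "http://localhost:5000"
#     resp = requests.post(BASE + "/getdata", json={
#         "method": "internals",   # or "endsem"
#         "cookies": {},           # ERP session cookies go here
#         "year": "3",
#         "sem": "6",
#     })
#     uid = resp.json()["session_id"]
#     time.sleep(6)                # allow the crawl to finish
#     results = requests.get(BASE + "/results/" + uid + "|||10").json()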