Skip to content

Commit d7bc2b0

Browse files
author
root
committed
Moved Analyze User to its own module
1 parent 75c429a commit d7bc2b0

File tree

3 files changed

+63
-54
lines changed

3 files changed

+63
-54
lines changed

api/Submission.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
import Parameters
55
from Helpers import *
66

7-
#from helpers import *
87

98
class search:
109
params = None

api/User.py

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
import time
2+
import html
3+
from collections import defaultdict
4+
import Parameters
5+
from Helpers import *
6+
7+
8+
class Analyze:
    """Falcon resource: aggregate a reddit user's comment activity.

    Queries the Elasticsearch comment index (filtered by the ``author``
    query parameter) and returns subreddit and link_id terms aggregations
    plus timing metadata, serialized as pretty-printed JSON.
    """

    def on_get(self, req, resp):
        """Handle GET: build the ES query from request params and respond.

        Recognized query parameters:
          * limit / size -- number of aggregation buckets (capped at 500,
                            default 25); ``limit`` is an alias for ``size``.
          * author       -- reddit username to filter on (lowercased).
        """
        start = time.time()
        params = req.params
        searchURL = 'http://mars:9200/rc/comments/_search'

        # Autovivifying arbitrarily-nested dict for building the ES query.
        nested_dict = lambda: defaultdict(nested_dict)
        q = nested_dict()
        sort_direction = 'desc'
        q['query']['bool']['filter'] = []

        # 'limit' is accepted as an alias for 'size'.
        if 'limit' in params:
            params['size'] = params['limit']

        # Fix: the original assigned q['size'] three times (if-branch,
        # else-branch, and again unconditionally below); one assignment
        # with the same resulting value suffices.
        size = 25
        if 'size' in params and params['size'] is not None and LooksLikeInt(params['size']):
            size = min(int(params['size']), 500)
        q['size'] = size

        if 'author' in params and params['author'] is not None:
            terms = nested_dict()
            terms['terms']['author'] = [params['author'].lower()]
            q['query']['bool']['filter'].append(terms)

        q['sort']['score'] = sort_direction

        q['aggs']['subreddit']['terms']['field'] = 'subreddit.keyword'
        q['aggs']['subreddit']['terms']['size'] = size
        q['aggs']['subreddit']['terms']['order']['_count'] = 'desc'

        # link_id bucket count is intentionally fixed at 25, independent
        # of the user-supplied size.
        q['aggs']['link_id']['terms']['field'] = 'link_id'
        q['aggs']['link_id']['terms']['size'] = 25
        q['aggs']['link_id']['terms']['order']['_count'] = 'desc'

        # GET with a request body is Elasticsearch's search convention.
        # NOTE(review): no timeout is set, so a hung ES node blocks this
        # worker indefinitely -- confirm whether adding one is acceptable.
        request = requests.get(searchURL, data=json.dumps(q))
        response = json.loads(request.text)

        # Re-encode link_id buckets into reddit's base-36 't3_' form.
        if response.get('aggregations', {}).get('link_id', {}).get('buckets', {}):
            for row in response['aggregations']['link_id']['buckets']:
                row['key'] = 't3_' + base36encode(row['key'])

        end = time.time()
        data = {}
        data['data'] = response
        data['metadata'] = {}
        data['metadata']['execution_time_milliseconds'] = round((end - start) * 1000, 2)
        data['metadata']['version'] = 'v3.0'
        resp.cache_control = ['public', 'max-age=2', 's-maxage=2']
        resp.body = json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))
61+

api/api.py

Lines changed: 2 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -14,69 +14,18 @@
1414
from collections import defaultdict
1515
import Submission
1616
import Comment
17+
import User
1718
import Parameters
1819
from Helpers import *
1920
from configparser import ConfigParser
2021

21-
class AnalyzeUser:
22-
def on_get(self, req, resp):
23-
start = time.time()
24-
params = req.params
25-
searchURL = 'http://mars:9200/rc/comments/_search'
26-
nested_dict = lambda: defaultdict(nested_dict)
27-
q = nested_dict()
28-
size = 25
29-
sort_direction = 'desc'
30-
q['query']['bool']['filter'] = []
3122

32-
if 'limit' in params:
33-
params['size'] = params['limit']
34-
35-
if 'size' in params and params['size'] is not None and LooksLikeInt(params['size']):
36-
size = 500 if int(params['size']) > 500 else int(params['size'])
37-
q['size'] = size
38-
else:
39-
q['size'] = 25
40-
41-
if 'author' in params and params['author'] is not None:
42-
terms = nested_dict()
43-
terms['terms']['author'] = [params['author'].lower()]
44-
q['query']['bool']['filter'].append(terms)
45-
46-
q['size'] = size
47-
q['sort']['score'] = sort_direction
48-
49-
q['aggs']['subreddit']['terms']['field'] = 'subreddit.keyword'
50-
q['aggs']['subreddit']['terms']['size'] = size
51-
q['aggs']['subreddit']['terms']['order']['_count'] = 'desc'
52-
53-
q['aggs']['link_id']['terms']['field'] = 'link_id'
54-
q['aggs']['link_id']['terms']['size'] = 25
55-
q['aggs']['link_id']['terms']['order']['_count'] = 'desc'
56-
57-
request = requests.get(searchURL, data=json.dumps(q))
58-
response = json.loads(request.text)
59-
60-
if response.get('aggregations', {}).get('link_id', {}).get('buckets',{}):
61-
for row in response['aggregations']['link_id']['buckets']:
62-
row['key'] = 't3_' + base36encode(row['key'])
63-
64-
end = time.time()
65-
data = {}
66-
data['data'] = response
67-
data['metadata'] = {}
68-
data['metadata']['execution_time_milliseconds'] = round((end - start) * 1000,2)
69-
data['metadata']['version'] = 'v3.0'
70-
resp.cache_control = ['public','max-age=2','s-maxage=2']
71-
resp.body = json.dumps(data,sort_keys=True,indent=4, separators=(',', ': '))
72-
73-
# Module-level Redis handle on db 1.
# NOTE(review): its consumer is not visible in this file -- confirm usage.
r = redis.StrictRedis(host='localhost', port=6379, db=1)

# Falcon WSGI application with every public route registered below.
api = falcon.API()

# (route path, resource factory) pairs. Each route is given its own
# freshly constructed resource instance, exactly as the original
# one-call-per-route registration did.
_route_table = [
    ('/reddit/search', Comment.search),
    ('/reddit/comment/search', Comment.search),
    ('/reddit/search/comment', Comment.search),
    ('/reddit/search/submission', Submission.search),
    ('/reddit/submission/search', Submission.search),
    ('/reddit/analyze/user', User.Analyze),
    ('/get/comment_ids/{submission_id}', Submission.getCommentIDs),
    ('/reddit/submission/comment_ids/{submission_id}', Submission.getCommentIDs),
]
for _path, _factory in _route_table:
    api.add_route(_path, _factory())

0 commit comments

Comments
 (0)