|
14 | 14 | from collections import defaultdict
|
15 | 15 | import Submission
|
16 | 16 | import Comment
|
| 17 | +import User |
17 | 18 | import Parameters
|
18 | 19 | from Helpers import *
|
19 | 20 | from configparser import ConfigParser
|
20 | 21 |
|
21 |
class AnalyzeUser:
    """Falcon resource summarising a Reddit user's comment activity.

    GET handler that queries the Elasticsearch comment index and returns,
    as JSON: the matching comments plus two aggregations — the subreddits
    the author posts in most, and the submissions (link_ids) they comment
    on most.  NOTE(review): the ES host is hard-coded to ``mars:9200``.
    """

    def on_get(self, req, resp):
        """Handle GET: build the ES query from ``req.params`` and respond.

        Recognised query params:
            author -- Reddit username to filter on (lower-cased for ES).
            size / limit -- max hits to return ('limit' is an alias for
                'size'); capped at 500, defaults to 25.
        """
        start = time.time()
        # Work on a copy so we never mutate the framework's params dict
        # (the original wrote the 'limit' alias back into req.params).
        params = dict(req.params)
        search_url = 'http://mars:9200/rc/comments/_search'

        # Arbitrarily-nested autovivifying dict for building the ES query.
        def nested_dict():
            return defaultdict(nested_dict)

        q = nested_dict()
        q['query']['bool']['filter'] = []

        # 'limit' is accepted as an alias for 'size'.
        if 'limit' in params:
            params['size'] = params['limit']

        size = 25  # default page size
        if params.get('size') is not None and LooksLikeInt(params['size']):
            size = min(int(params['size']), 500)  # hard cap at 500 hits
        q['size'] = size

        if params.get('author') is not None:
            terms = nested_dict()
            terms['terms']['author'] = [params['author'].lower()]
            q['query']['bool']['filter'].append(terms)

        q['sort']['score'] = 'desc'

        # Aggregation 1: subreddits this author is most active in.
        q['aggs']['subreddit']['terms']['field'] = 'subreddit.keyword'
        q['aggs']['subreddit']['terms']['size'] = size
        q['aggs']['subreddit']['terms']['order']['_count'] = 'desc'

        # Aggregation 2: submissions with the most comments by this author.
        q['aggs']['link_id']['terms']['field'] = 'link_id'
        q['aggs']['link_id']['terms']['size'] = 25
        q['aggs']['link_id']['terms']['order']['_count'] = 'desc'

        # Timeout added so a hung ES node cannot stall the worker forever.
        es_request = requests.get(search_url, data=json.dumps(q), timeout=30)
        response = json.loads(es_request.text)

        # link_id buckets come back as base-10 integers; convert them to
        # Reddit's public base-36 't3_' submission-id form.
        if response.get('aggregations', {}).get('link_id', {}).get('buckets', {}):
            for row in response['aggregations']['link_id']['buckets']:
                row['key'] = 't3_' + base36encode(row['key'])

        end = time.time()
        data = {
            'data': response,
            'metadata': {
                'execution_time_milliseconds': round((end - start) * 1000, 2),
                'version': 'v3.0',
            },
        }
        # Short shared/CDN cache: results churn quickly.
        resp.cache_control = ['public', 'max-age=2', 's-maxage=2']
        resp.body = json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))
# WSGI application and route table.  Resolved from the diff to the commit's
# final state: the in-file AnalyzeUser resource and the module-level redis
# handle were removed in favour of User.Analyze().  Several alias URLs map
# to the same resource so older client paths keep working.
api = falcon.API()
api.add_route('/reddit/search', Comment.search())
api.add_route('/reddit/comment/search', Comment.search())
api.add_route('/reddit/search/comment', Comment.search())
api.add_route('/reddit/search/submission', Submission.search())
api.add_route('/reddit/submission/search', Submission.search())
api.add_route('/reddit/analyze/user', User.Analyze())
api.add_route('/get/comment_ids/{submission_id}', Submission.getCommentIDs())
api.add_route('/reddit/submission/comment_ids/{submission_id}', Submission.getCommentIDs())