
Commit

twitterScrapping
Hadjer711 committed Jun 25, 2020
1 parent 21af803 commit 7a75ed1
Showing 19 changed files with 77 additions and 17 deletions.
4 changes: 4 additions & 0 deletions Readme.md
@@ -13,3 +13,7 @@ pip install django-cors-headers
pip install djangorestframework-simplejwt

pip install dj_database_url

pip install tweepy

pip install feedparser
Binary file added __pycache__/config.cpython-38.pyc
7 changes: 7 additions & 0 deletions config.py
@@ -0,0 +1,7 @@
# for Twitter scraping
CONSUMER_KEY = 'dF8LarLqWcQAlZ3FqomAFGWNZ'
CONSUMER_SECRET = 'eaKxAM5scchwogxYo8oEzC4XkangtrDMsuNqx8ZlqBDuVMpBpZ'
ACCESS_TOKEN = '1046391379360919552-IOEg7xCFH03syq18cGhU3DJxDRVj4D'
ACCESS_TOKEN_SECRET = 'UUjxB6r8yyEQAe0ffcGI7oqPvKI50pxwwmZGSmXkbQbt3'

# notification email
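
The keys above are hard-coded in config.py for convenience. A common alternative, not part of this commit, is to read them from environment variables so the secrets stay out of version control; a minimal sketch, with hypothetical environment variable names:

# Hypothetical alternative to hard-coding the credentials in config.py:
# read them from the environment (variable names are assumptions).
import os

CONSUMER_KEY = os.environ.get('TWITTER_CONSUMER_KEY', '')
CONSUMER_SECRET = os.environ.get('TWITTER_CONSUMER_SECRET', '')
ACCESS_TOKEN = os.environ.get('TWITTER_ACCESS_TOKEN', '')
ACCESS_TOKEN_SECRET = os.environ.get('TWITTER_ACCESS_TOKEN_SECRET', '')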
2 changes: 1 addition & 1 deletion requirements.txt
@@ -8,5 +8,5 @@ djangorestframework
gunicorn
psycopg2-binary
dj-database-url
Scrapy
tweepy
feedparser
Binary file modified robot/__pycache__/models.cpython-38.pyc
Binary file added robot/__pycache__/twitterScrapping.cpython-38.pyc
Binary file modified robot/__pycache__/views.cpython-38.pyc
22 changes: 14 additions & 8 deletions robot/migrations/0001_initial.py
@@ -1,4 +1,4 @@
# Generated by Django 3.0.4 on 2020-06-22 13:56
# Generated by Django 3.0.4 on 2020-06-25 21:45

from django.db import migrations, models

@@ -12,35 +12,41 @@ class Migration(migrations.Migration):

    operations = [
        migrations.CreateModel(
            name='GoogleSearchResult',
            name='Tweets',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('proprio', models.CharField(max_length=50)),
                ('content', models.CharField(max_length=500)),
                ('date', models.DateTimeField()),
                ('valide', models.BooleanField(default=False)),
                ('supprime', models.BooleanField(default=False)),
                ('titre', models.CharField(max_length=20)),
                ('description', models.TextField()),
                ('url', models.URLField()),
                ('date', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='Tweets',
            name='VideoYoutube',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valide', models.BooleanField(default=False)),
                ('supprime', models.BooleanField(default=False)),
                ('titre', models.CharField(max_length=200)),
                ('description', models.TextField()),
                ('url', models.URLField()),
            ],
        ),
        migrations.CreateModel(
            name='VideoYoutube',
            name='GoogleSearchResult',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valide', models.BooleanField(default=False)),
                ('supprime', models.BooleanField(default=False)),
                ('titre', models.CharField(max_length=20)),
                ('titre', models.CharField(max_length=200)),
                ('description', models.TextField()),
                ('url', models.URLField()),
                ('date', models.CharField(max_length=50)),
            ],
            options={
                'unique_together': {('url',)},
            },
        ),
    ]
17 changes: 17 additions & 0 deletions robot/migrations/0002_remove_tweets_url.py
@@ -0,0 +1,17 @@
# Generated by Django 3.0.4 on 2020-06-25 21:49

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('robot', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='tweets',
            name='url',
        ),
    ]
@@ -1,17 +1,17 @@
# Generated by Django 3.0.4 on 2020-06-22 14:32
# Generated by Django 3.0.4 on 2020-06-25 21:56

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('robot', '0001_initial'),
        ('robot', '0002_remove_tweets_url'),
    ]

    operations = [
        migrations.AlterField(
            model_name='googlesearchresult',
            model_name='tweets',
            name='date',
            field=models.CharField(max_length=50),
        ),
Binary file modified robot/migrations/__pycache__/0001_initial.cpython-38.pyc
Binary file modified robot/migrations/__pycache__/__init__.cpython-38.pyc
9 changes: 6 additions & 3 deletions robot/models.py
@@ -4,19 +4,22 @@
class VideoYoutube(models.Model):
    valide = models.BooleanField(default=False)
    supprime = models.BooleanField(default=False)
    titre = models.CharField(max_length=20)
    titre = models.CharField(max_length=200)
    description = models.TextField()
    url = models.URLField()

class Tweets(models.Model):
    proprio = models.CharField(max_length=50)
    content = models.CharField(max_length=500)
    date = models.CharField(max_length=50)
    valide = models.BooleanField(default=False)
    supprime = models.BooleanField(default=False)
    url = models.URLField()


class GoogleSearchResult(models.Model):
    valide = models.BooleanField(default=False)
    supprime = models.BooleanField(default=False)
    titre = models.CharField(max_length=20)
    titre = models.CharField(max_length=200)
    description = models.TextField()
    url = models.URLField()
    date = models.CharField(max_length=50)
22 changes: 22 additions & 0 deletions robot/twitterScrapping.py
@@ -0,0 +1,22 @@
import tweepy
import sys
from .models import Tweets
from config import ACCESS_TOKEN, ACCESS_TOKEN_SECRET,CONSUMER_KEY,CONSUMER_SECRET

def getTweets(counts):

    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)

    api = tweepy.API(auth)
    try:
        api.verify_credentials()
        print('Authentication Successful')
    except:
        print('Error while authenticating API')
        sys.exit(1)

    covid_tweets = tweepy.Cursor(api.search, q='فيروس كورونا').items(counts)
    for tweet in covid_tweets:
        tweetObj = Tweets.objects.create(content=tweet.text, date=tweet.created_at, proprio=tweet.user.name)
        tweetObj.save()
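
getTweets authenticates against the Twitter API with the keys from config.py, searches recent tweets matching the Arabic query فيروس كورونا ("coronavirus"), and stores each result as a Tweets row. A minimal sketch of calling it from a `python manage.py shell` session (the count of 20 is just an example, not part of the commit):

# Hypothetical invocation from the Django shell of this project.
from robot.twitterScrapping import getTweets
from robot.models import Tweets

getTweets(20)                   # fetch and store 20 matching tweets
print(Tweets.objects.count())   # rows now present in the Tweets table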
5 changes: 3 additions & 2 deletions robot/views.py
@@ -6,7 +6,7 @@


from rest_framework import viewsets, permissions

from .twitterScrapping import getTweets
from .googleSearchScrapping import ParseFeed
from .serializers import *
from .models import *
@@ -119,8 +119,9 @@ class TweetsViewSet(viewsets.ModelViewSet):
    serializer_class = TweetSerializer

    @action(methods=['post', 'get'], detail=False)
    def show_list(self, request):
    def getData(self, request):
        if(request.method == "GET"):
            getTweets(50)
            data = Tweets.objects.all()
            serializers = TweetSerializer(data, many=True)
            return Response(serializers.data)
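
With this change, a GET on the renamed getData action first scrapes 50 fresh tweets via getTweets(50) and then returns every stored tweet serialized. A hedged sketch of exercising the route from a test case or manage.py shell, assuming TweetsViewSet is registered under a "tweets" router prefix (the registration is not shown in this diff):

# Hypothetical check of the new action; the "tweets" URL prefix is an
# assumption based on a typical DRF router registration.
from rest_framework.test import APIClient

client = APIClient()
response = client.get('/tweets/getData/')  # runs getTweets(50), then lists all stored tweets
print(response.status_code, len(response.json()))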
Empty file removed robot/youtubeScrapping.py
