Skip to content

Commit

Permalink
use json to store data instead of txt
Browse files Browse the repository at this point in the history
  • Loading branch information
rhl-bthr committed Sep 14, 2018
1 parent 79a8073 commit b6b38e2
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 36 deletions.
39 changes: 16 additions & 23 deletions src/installation.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import os
from getpass import getpass
import json

try:
from bs4 import BeautifulSoup
Expand All @@ -13,43 +14,35 @@


try:
    # Credentials collected from the user; posted to Nalanda to verify and
    # persisted to config.json once a login succeeds.
    config = {}
    # Maps subject page URL -> human-readable subject name.
    name_url_pair = {}

    while True:
        # BUG FIX: the trailing "+" with the string on the next line was an
        # unparenthesized line continuation (SyntaxError); parenthesize it.
        config["username"] = (input("\nEnter BITS ID [Eg: f2016015]\n")
                              + "@pilani.bits-pilani.ac.in")

        config["password"] = getpass(prompt="Enter nalanda password:")

        # Attempt a login; Moodle renders an anchor with id "loginerrormessage"
        # when the credentials are rejected.
        result = session.post(
            "http://nalanda.bits-pilani.ac.in/login/index.php",
            data=config)
        result = BeautifulSoup(result.text, "html.parser")

        if not result.find_all("a", {"id": "loginerrormessage"}):
            break
        print("Username or Password Incorrect. Please retry")

    # BUG FIX: json.dumps(config, f) passed the file object as the `skipkeys`
    # argument, returned a string, and wrote nothing — config.json stayed
    # empty.  json.dump(obj, fp) actually serializes into the file.
    with open(join(INSTALL_PATH, "config.json"), "w") as f:
        json.dump(config, f)

    with open(join(INSTALL_PATH, "subjects.json"), "w") as sub_file:
        result = session.get("http://nalanda.bits-pilani.ac.in/my")
        soup = BeautifulSoup(result.text, "html.parser")

        # Each "column c1" div on the dashboard holds one subject link:
        # record its href (URL) keyed against the cleaned subject name text.
        for x in soup.find_all("div", "column c1"):
            name_url_pair[x.contents[0].get("href")] = \
                ((x.contents[0].contents[1]).split("/")[0]).split("\\")[0]
        # BUG FIX: same json.dumps -> json.dump correction as above.
        json.dump(name_url_pair, sub_file)

except KeyboardInterrupt:
    quit("Installation cancelled by user. Please retry.")
Expand Down
20 changes: 7 additions & 13 deletions src/nalanda
Original file line number Diff line number Diff line change
Expand Up @@ -3,27 +3,20 @@
from bs4 import BeautifulSoup
import os
import requests
import json

join = os.path.join

# Installation data lives under ~/.nalanda-cli; downloaded slides under ~/BITS.
INSTALL_PATH = join(os.path.expanduser("~"), ".nalanda-cli")
SLIDES_PATH = join(os.path.expanduser("~"), "BITS")

# BUG FIX: subjects.json was opened with mode "w", which truncates the stored
# data and raises on json.load (file not readable).  Open read-only, and use
# a context manager so the handle is not leaked.
with open(join(INSTALL_PATH, "subjects.json")) as _subs_file:
    subs = json.load(_subs_file)
# BUG FIX: installation.py stores {subject_url: subject_name} (the dict key is
# the anchor's href), so keys() are the URLs and values() are the names — the
# original assignments were swapped.  Materialize as lists so both support
# stable ordering and indexing like the old newline-split txt files did.
SUB_NAMES = list(subs.values())
SUB_URLS = list(subs.keys())

ZIP_FILE_LINK = "http://nalanda.bits-pilani.ac.in/mod/folder/download_folder.php"
NALANDA_LOGIN = "http://nalanda.bits-pilani.ac.in/login/index.php"


def login():
    """Create a requests session authenticated against Nalanda.

    Reads the stored credentials (username on the first line, password on
    the second line of config.txt) and posts them to the Moodle login
    endpoint so subsequent requests carry the login cookies.

    Returns:
        The authenticated ``requests`` session.
    """
    session = requests.session()
    # BUG FIX: the config file handle was opened and never closed; use a
    # context manager to release it deterministically.
    with open(join(INSTALL_PATH, "config.txt")) as cfg:
        username, password = cfg.read().split("\n")
    session.post(NALANDA_LOGIN, data={
        "username": username,
        "password": password,
    })
    return session


def sorting_links():
sub_links = [BeautifulSoup(session.get(sub).text, "html.parser")
.find_all("a", {"onclick": ""}) for sub in SUB_URLS]
Expand Down Expand Up @@ -128,7 +121,8 @@ def main():
"""Displaying notices, news and other announcements, updating slides"""
try:
print ("\t\t" + bold("**nalanda-cli**"))
session = login()
session = requests.session()
session.post(NALANDA_LOGIN, data=json.loads(open(join(INSTALL_PATH, "config.json"))))
notice_urls, news_urls, res_urls = sorting_links()
subject_news_url = get_news(session, news_urls)
unread_news = find_new(session, notice_urls, subject_news_url)
Expand Down

0 comments on commit b6b38e2

Please sign in to comment.