Commit 90127af

remove if __name__ == __main__ condition
1 parent 3b21e56 commit 90127af
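
For context, the `if __name__ == "__main__":` guard this commit removes is Python's standard entry-point check: code under it runs only when the file is executed directly, not when it is imported as a module. A minimal sketch of the idiom (the file and function names are illustrative, not from this repo):

```python
# demo.py -- illustrative file name, not part of this repo
def main():
    # Work that should happen only when the file is run as a script.
    print("running as a script")

if __name__ == "__main__":
    # __name__ equals "__main__" only under `python demo.py`;
    # under `import demo` it equals "demo", so main() is skipped.
    main()
```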

File tree

1 file changed: +75 −77 lines changed


src/nalanda

Lines changed: 75 additions & 77 deletions
```diff
@@ -36,82 +36,80 @@ for sub in SUB_NAMES:
 def bold(text):
     return "\033[1m" + text + "\033[0m"
 
-
-if __name__ == "__main__":
-    try:
-        session = requests.session()
-        session.post(LOGIN_LINK, data=json.load(open(CONFIG_FILE)))
-
-        links = [BeautifulSoup(session.get(sub).text, "html.parser").find_all("a", {"onclick": ""}) for sub in SUB_URLS]
-        for sub in RANGE_SUBS:
-            for y in links[sub]:
-                url = y.get("href")
-                if("resource/view.php?id" in url or "folder/view.php?id=" in url):
-                    URLS[SUB_URLS[sub]]["resource"].append(url)
-                elif("page/view.php?id" in url):
-                    URLS[SUB_URLS[sub]]["notice"].append(url + "$%^" + y.contents[1].contents[0])
-                elif("forum/view.php?id" in url):
-                    URLS[SUB_URLS[sub]]["news"].append(url)
-
-        for x in SUB_URLS:
-            for y in URLS[x]["news"]:
+try:
+    session = requests.session()
+    session.post(LOGIN_LINK, data=json.load(open(CONFIG_FILE)))
+
+    links = [BeautifulSoup(session.get(sub).text, "html.parser").find_all("a", {"onclick": ""}) for sub in SUB_URLS]
+    for sub in RANGE_SUBS:
+        for y in links[sub]:
+            url = y.get("href")
+            if("resource/view.php?id" in url or "folder/view.php?id=" in url):
+                URLS[SUB_URLS[sub]]["resource"].append(url)
+            elif("page/view.php?id" in url):
+                URLS[SUB_URLS[sub]]["notice"].append(url + "$%^" + y.contents[1].contents[0])
+            elif("forum/view.php?id" in url):
+                URLS[SUB_URLS[sub]]["news"].append(url)
+
+    for x in SUB_URLS:
+        for y in URLS[x]["news"]:
+            result = session.get(y)
+            soup = BeautifulSoup(result.text, "html.parser")
+            discussion_list = soup.find_all("tr", "discussion")
+            for url in discussion_list:
+                if url.find("td", "topic starter pinned"):
+                    URLS[x]["notice"].append(url.contents[0].contents[1].get(
+                        "href") + "$%^" + url.contents[0].contents[1].contents[0])
+                else:
+                    URLS[x]["notice"].append(url.contents[0].contents[0].get(
+                        "href") + "$%^" + url.contents[0].contents[0].contents[0])
+        URLS[x].pop("news", None)
+
+    DONE_URLS = json.load(open(DATA_FILE))
+    new_news = [list(set(URLS[x]["notice"]) - set(DONE_URLS[x]["notice"])) for x in SUB_URLS]
+    new_news = [[x.split("$%^") for x in new_news[sub]]for sub in RANGE_SUBS]
+
+    new_slides = [list(set(URLS[x]["resource"]) - set(DONE_URLS[x]["resource"])) for x in SUB_URLS]
+
+    new_sub_index = []
+    for x in RANGE_SUBS:
+        if new_slides[x]:
+            new_sub_index.append(x)
+            for y in new_slides[x]:
+                if "folder/view" in y:
+                    id_param = y.split("php")[1]
+                    result = session.get(ZIP_FILE_LINK + id_param)
+                else:
                     result = session.get(y)
-                soup = BeautifulSoup(result.text, "html.parser")
-                discussion_list = soup.find_all("tr", "discussion")
-                for url in discussion_list:
-                    if url.find("td", "topic starter pinned"):
-                        URLS[x]["notice"].append(url.contents[0].contents[1].get(
-                            "href") + "$%^" + url.contents[0].contents[1].contents[0])
-                    else:
-                        URLS[x]["notice"].append(url.contents[0].contents[0].get(
-                            "href") + "$%^" + url.contents[0].contents[0].contents[0])
-            URLS[x].pop("news", None)
-
-        DONE_URLS = json.load(open(DATA_FILE))
-        new_news = [list(set(URLS[x]["notice"]) - set(DONE_URLS[x]["notice"])) for x in SUB_URLS]
-        new_news = [[x.split("$%^") for x in new_news[sub]]for sub in RANGE_SUBS]
-
-        new_slides = [list(set(URLS[x]["resource"]) - set(DONE_URLS[x]["resource"])) for x in SUB_URLS]
-
-        new_sub_index = []
+
+            file_name = result.headers["content-disposition"].split('e="')[1].split('"')[0]
+            with open(join(SLIDES_PATH, SUB_NAMES[x], file_name), "wb") as f:
+                f.write(result.content)
+
+    print(bold("News:"))
+    if(sum([len(x) for x in new_news])==0):
+        print("\tNo updates")
+    else:
         for x in RANGE_SUBS:
-            if new_slides[x]:
-                new_sub_index.append(x)
-                for y in new_slides[x]:
-                    if "folder/view" in y:
-                        id_param = y.split("php")[1]
-                        result = session.get(ZIP_FILE_LINK + id_param)
-                    else:
-                        result = session.get(y)
-
-                file_name = result.headers["content-disposition"].split('e="')[1].split('"')[0]
-                with open(join(SLIDES_PATH, SUB_NAMES[x], file_name), "wb") as f:
-                    f.write(result.content)
-
-        print(bold("News:"))
-        if(sum([len(x) for x in new_news])==0):
-            print("\tNo updates")
-        else:
-            for x in RANGE_SUBS:
-                if new_news[x]:
-                    print(bold("\n" + SUB_NAMES[x] + "-"))
-                    for y in range(len(new_news[x])):
-                        print("\t" + bold(str(y + 1)) + ". " + new_news[x][y][1] + "\n\t\t" + \
-                              new_news[x][y][0])
-
-        print ("\n" + "-" * 60 + "\n")
-
-        print(bold("Lectures:"))
-        if not new_sub_index:
-            print("\tNo updates")
-        else:
-            for x in new_sub_index:
-                print ("\t" + bold(SUB_NAMES[x]) + " has new updates")
-            print ("\tfile://" + SLIDES_PATH)
-
-        json.dump(URLS ,open(DATA_FILE, 'w'), indent=4)
-
-    except requests.exceptions.ConnectionError:
-        quit("No Internet Connection. Please retry")
-    except KeyboardInterrupt:
-        print("Stopped by user.")
+            if new_news[x]:
+                print(bold("\n" + SUB_NAMES[x] + "-"))
+                for y in range(len(new_news[x])):
+                    print("\t" + bold(str(y + 1)) + ". " + new_news[x][y][1] + "\n\t\t" + \
+                          new_news[x][y][0])
+
+    print ("\n" + "-" * 60 + "\n")
+
+    print(bold("Lectures:"))
+    if not new_sub_index:
+        print("\tNo updates")
+    else:
+        for x in new_sub_index:
+            print ("\t" + bold(SUB_NAMES[x]) + " has new updates")
+        print ("\tfile://" + SLIDES_PATH)
+
+    json.dump(URLS ,open(DATA_FILE, 'w'), indent=4)
+
+except requests.exceptions.ConnectionError:
+    quit("No Internet Connection. Please retry")
+except KeyboardInterrupt:
+    print("Stopped by user.")
```
