-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathRMP.py
More file actions
executable file
·222 lines (180 loc) · 8.68 KB
/
RMP.py
File metadata and controls
executable file
·222 lines (180 loc) · 8.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
#!/usr/bin/env python3
'''
RateMyProfessors Project:
Gather review information for all UT professors,
perform frequency and sentiment analysis.
Calculate overall review of UT professors
and different departments.
Rachel Harris
Shivam Patel
Madeline Phillips
'''
import requests
import json
import math
from bs4 import BeautifulSoup
import argparse
import time
from json import JSONEncoder
# TODO(analysis): lowercase all review text before running the
# frequency and sentiment analysis.
class Review():
    """One RateMyProfessors review: body text, tag list, and the two ratings."""

    def __init__( self, reviewBody, reviewTags, quality, difficulty ):
        self.reviewBody = reviewBody    # free-text comment
        self.reviewTags = reviewTags    # list of tag strings (may be empty)
        self.quality = quality          # quality rating as shown on the page
        self.difficulty = difficulty    # difficulty rating as shown on the page

    def print( self ):
        """Write this review's ratings, tags, and body to stdout."""
        for line in ( "Quality: " + str( self.quality ),
                      "Difficulty: " + str( self.difficulty ),
                      "",
                      "Tags:" ):
            print( line )
        for tag in self.reviewTags:
            print( tag )
        for line in ( "", "Review:", self.reviewBody, "" ):
            print( line )
class Professor():
    """A professor record: identity, department, overall rating, and reviews."""

    def __init__( self, numReviews, department, firstName, middleName, lastName, overallRating ):
        self.numReviews = numReviews        # review count reported by the listing API
        self.department = department
        self.firstName = firstName
        self.middleName = middleName
        self.lastName = lastName
        # Display name deliberately omits the middle name.
        self.fullName = firstName + " " + lastName
        self.overallRating = overallRating
        self.reviews = []                   # Review objects collected by the scraper

    def addReview( self, review ):
        """Append one Review to this professor's list."""
        self.reviews.append( review )

    def print( self ):
        """Write a short summary of this professor to stdout."""
        for line in ( "Name: " + self.fullName + ":",
                      "Department: " + self.department,
                      str( self.numReviews ) + " reviews",
                      "Length of reviews list: " + str( len( self.reviews ) ),
                      "Overall rating: " + str( self.overallRating ),
                      "" ):
            print( line )
class Encoder( JSONEncoder ):
    """JSON encoder that serializes arbitrary objects via their attribute dict.

    Lets Professor and Review instances (and their nesting) be dumped
    with json.dumps(..., cls=Encoder) without per-class serializers.
    """

    def default( self, obj ):
        # vars(obj) is equivalent to obj.__dict__ for plain instances.
        return vars( obj )
def loadJson( fileName ):
    """Load previously-scraped professor data from a JSON file.

    Returns whatever json.load yields (a dict of plain dicts, since
    Encoder flattened the objects on write).
    """
    print( "Reading scraped information from " + fileName + "." )
    with open( fileName, "r" ) as inFile:
        return json.load( inFile )
def storeJson( fileName, profsWithReviews ):
    """Serialize the scraped professor dict to fileName as JSON.

    Uses Encoder so Professor/Review objects are flattened to their
    attribute dicts.
    """
    print( "Printing scraped information to " + fileName + "." )
    with open( fileName, "w" ) as outFile:
        json.dump( profsWithReviews, outFile, cls = Encoder )
def _reviewText( review, classNames ):
    # Return the text of the first <div> whose class matches any candidate
    # string, or None when nothing matches. RMP's hashed CSS class names
    # vary between page builds, so callers pass every known variant.
    for className in classNames:
        divs = review.findAll( "div", { "class": className } )
        if len( divs ) > 0:
            return divs[0].text
    return None


def _parseProfessorReviews( profInfo, professorP ):
    # Parse every review block on a professor's page into Review objects
    # and attach them to professorP.
    profReviews = profInfo.findAll( "div", { "class": "Rating__RatingBody-sc-1rhvpxz-0 dGrvXb" } )
    for review in profReviews:
        # Reset per review: previously profTags was only assigned when a
        # tags <div> existed, so a tagless review inherited the previous
        # review's tags (or raised NameError on the first iteration).
        profTags = []
        tagsDiv = review.findAll( "div", { "class": "RatingTags__StyledTags-sc-1boeqx2-0 eLpnFv" } )
        if len( tagsDiv ) > 0:
            for span in tagsDiv[0].findAll( "span", { "class": "Tag-bs9vf4-0 hHOVKF" } ):
                profTags.append( span.text )
        profBody = _reviewText( review, [ "Comments__StyledComments-dzzyvm-0 gRjWel" ] )
        # Quality shows up under one of several hashed class names.
        profQuality = _reviewText( review, [
            "RatingValues__RatingValue-sc-6dc747-3 kLWEWI",
            "RatingValues__RatingValue-sc-6dc747-3 gQotpy",
            "RatingValues__RatingValue-sc-6dc747-3 lbaFTo",
        ] )
        profDifficulty = _reviewText( review, [ "RatingValues__RatingValue-sc-6dc747-3 jILzuI" ] )
        professorP.addReview( Review( profBody, profTags, profQuality, profDifficulty ) )


def scrape():
    """Scrape every UT professor and their reviews from RateMyProfessors.

    Returns:
        (professors, numProfsWithReviews, professorsWithoutReviews,
         numProfsWithoutReviews) — the two dicts map a professor's full
        name to a Professor object.
    """
    # This is UT's school ID on RateMyProfessors.
    UTid = 1385
    # Professor-listing endpoint; pages are 1-indexed (the probe below asks
    # for page 1). https is used throughout (one URL previously used http).
    listURL = ( "https://www.ratemyprofessors.com/filter/professor/?&page={}"
                "&filter=teacherlastname_sort_s+asc&query=*%3A*"
                "&queryoption=TEACHER&queryBy=schoolId&sid=" + str( UTid ) )
    # Probe page 1 to learn the total professor count.
    webpage = requests.get( listURL.format( 1 ) )
    jsonpage = json.loads( webpage.content )
    # Page 1 carries 20 professors; "remaining" counts the rest.
    num_professors = jsonpage["remaining"] + 20
    print( "Got " + str( num_professors ) + " professors" )
    iteration = 0
    listOfProfs = []
    pageCount = math.ceil( num_professors / 20 )
    professors = {}
    professorsWithoutReviews = {}
    numProfsWithReviews = 0
    numProfsWithoutReviews = 0
    totalReviews = 0
    # Iterate through pages 1..pageCount. The previous loop used
    # range(pageCount), which requested page 0 and skipped the last page.
    for i in range( 1, pageCount + 1 ):
        time.sleep( 5 )  # throttle to be polite to the server
        profsOnCurrentPage = requests.get( listURL.format( i ) )
        jsonPage = json.loads( profsOnCurrentPage.content )
        currentPageList = jsonPage['professors']
        listOfProfs.extend( currentPageList )
        # Iterate through each professor on this page.
        for j in range( len( currentPageList ) ):
            time.sleep( 5 )
            entry = currentPageList[j]
            if entry['overall_rating'] != "N/A":
                professorP = Professor( int( entry['tNumRatings'] ), entry['tDept'],
                                        entry['tFname'], entry['tMiddlename'],
                                        entry['tLname'], float( entry['overall_rating'] ) )
                numProfsWithReviews += 1
            else:
                # No overall rating means no reviews: record the professor and
                # move on. (Previously this object was built and then dropped.)
                professorP = Professor( int( entry['tNumRatings'] ), entry['tDept'],
                                        entry['tFname'], entry['tMiddlename'],
                                        entry['tLname'], 0.0 )
                numProfsWithoutReviews += 1
                professorsWithoutReviews[professorP.fullName] = professorP
                iteration += 1
                continue
            # Fetch and parse the professor's own page for individual reviews.
            profURL = "https://www.ratemyprofessors.com/ShowRatings.jsp?tid=" + str( entry['tid'] )
            profPage = requests.get( profURL )
            profInfo = BeautifulSoup( profPage.text, "html.parser" )
            _parseProfessorReviews( profInfo, professorP )
            print( professorP.fullName + ": " + str( professorP.numReviews ) + " reviews, list: " + str( len( professorP.reviews ) ) )
            # File the professor under the appropriate bucket.
            if len( professorP.reviews ) == 0:
                professorsWithoutReviews[professorP.fullName] = professorP
            else:
                professors[professorP.fullName] = professorP
            totalReviews += len( professorP.reviews )
            iteration += 1
            if iteration % 100 == 0:
                print()
                print( "Finished " + str( iteration ) + " professors." )
                print()
    print( "There are " + str( numProfsWithReviews ) + " professors with reviews." )
    print( "There are " + str( numProfsWithoutReviews ) + " professors without reviews." )
    print( "Got a total of " + str( totalReviews ) + " reviews." )
    return professors, numProfsWithReviews, professorsWithoutReviews, numProfsWithoutReviews
if __name__ == "__main__":
    # CLI: -s true|t scrapes and writes JSON to the -o file; any other -s
    # value loads previously scraped JSON from the -i file.
    parser = argparse.ArgumentParser()
    parser.add_argument( "-s", dest = "scrape", required = True )
    parser.add_argument( "-i", dest = "input", required = False )
    parser.add_argument( "-o", dest = "output", required = False )
    args = parser.parse_args()

    if args.scrape.lower() in ( "true", "t" ):
        print( "Scrape is set to true. Calling scrape function." )
        if args.output is None:
            print( "File for writing scraped data is not specified. Use -o option with output file name." )
            exit()
        profsWR, numProfsWR, profsWNoR, numProfsWNoR = scrape()
        storeJson( args.output, profsWR )
    else:
        print( "Scrape is set to false. Calling load function." )
        if args.input is None:
            print( "File for loading JSON is not specified. Use -i option with input file name." )
            exit()
        profsWR = loadJson( args.input )