2 files changed, +17 −12 lines changed

@@ -217,11 +217,11 @@ pipeline {
             //         unit_test_linux("pytorch", "cpu")
             //     }
             // }
-            // stage("Tutorial test") {
-            //     steps {
-            //         tutorial_test_linux("mxnet")
-            //     }
-            // }
+            stage("Tutorial test") {
+                steps {
+                    tutorial_test_linux("mxnet")
+                }
+            }
         }
     }
     stage("MXNet GPU") {
@@ -243,11 +243,11 @@ pipeline {
             //         unit_test_linux("pytorch", "cpu")
             //     }
             // }
-            // stage("Tutorial test") {
-            //     steps {
-            //         tutorial_test_linux("mxnet")
-            //     }
-            // }
+            stage("Tutorial test") {
+                steps {
+                    tutorial_test_linux("mxnet")
+                }
+            }
         }
     }
 }
tutorials/models/5_giant_graph

@@ -250,9 +250,9 @@ def forward(self, nf):
 # dropout probability
 dropout = 0.2
 # batch size
-batch_size = 10000
+batch_size = 1000
 # number of neighbors to sample
-num_neighbors = 8
+num_neighbors = 4
 # number of epochs
 num_epochs = 1

@@ -267,6 +267,7 @@ def forward(self, nf):
                         {'learning_rate': 0.03, 'wd': 0})

 for epoch in range(num_epochs):
+    i = 0
     for nf in dgl.contrib.sampling.NeighborSampler(g, batch_size,
                                                     num_neighbors,
                                                     neighbor_type='in',
@@ -291,6 +292,10 @@ def forward(self, nf):
         # optimization
         trainer.step(batch_size=1)
         print("Epoch[{}]: loss {}".format(epoch, loss.asscalar()))
+        i += 1
+        # We only train the model with 32 mini-batches just for demonstration.
+        if i >= 32:
+            break

 ##############################################################################
 # Control Variate
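For readers skimming the second file's change: the new `i` counter simply caps the demo run at 32 mini-batches so the tutorial finishes quickly. Below is a minimal, self-contained sketch of that counter-and-break pattern; `dummy_minibatch_sampler` and `max_batches` are illustrative stand-ins, not the tutorial's DGL `NeighborSampler` or its MXNet trainer.

# A minimal, self-contained sketch of the counter-and-break pattern this patch
# adds to the tutorial's training loop. The sampler here is a dummy stand-in;
# dummy_minibatch_sampler and max_batches are illustrative names, not part of
# the tutorial or the DGL API.
def dummy_minibatch_sampler(num_batches):
    """Stand-in for dgl.contrib.sampling.NeighborSampler: yields batch ids."""
    for batch_id in range(num_batches):
        yield batch_id

num_epochs = 1
max_batches = 32  # same cap the patch hard-codes for the demo

for epoch in range(num_epochs):
    i = 0
    for nf in dummy_minibatch_sampler(num_batches=1000):
        # ... forward pass, loss computation, backward, trainer.step() go here ...
        i += 1
        # Stop early so the demo finishes quickly; full training would simply
        # consume the entire sampler instead of breaking.
        if i >= max_batches:
            break
    print("Epoch[{}]: processed {} mini-batches".format(epoch, i))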