File tree Expand file tree Collapse file tree
Expand file tree Collapse file tree Original file line number Diff line number Diff line change 1515
1616import numpy as np
1717import torch
18- from torch.autograd import Variable
1918
2019
2120def mpjpe_cal(predicted, target):
@@ -225,13 +224,11 @@ def get_varialbe(split, target):
225224 var = []
226225 if split == "train":
227226 for i in range(num):
228- temp = (
229- Variable(target[i], requires_grad=False).contiguous().type(torch.cuda.FloatTensor)
230- )
227+ temp = target[i].requires_grad_(False).contiguous().type(torch.cuda.FloatTensor)
231228 var.append(temp)
232229 else:
233230 for i in range(num):
234- temp = Variable(target[i]).contiguous().cuda().type(torch.cuda.FloatTensor)
231+ temp = target[i].contiguous().cuda().type(torch.cuda.FloatTensor)
235232 var.append(temp)
236233
237234 return var
Original file line number Diff line number Diff line change 1515
1616import numpy as np
1717import torch
18- from torch.autograd import Variable
1918
2019def deterministic_random(min_value, max_value, data):
2120 digest = hashlib.sha256(data.encode()).digest()
@@ -191,15 +190,11 @@ def get_varialbe(split, target):
191190 var = []
192191 if split == "train":
193192 for i in range(num):
194- temp = (
195- Variable(target[i], requires_grad=False)
196- .contiguous()
197- .type(torch.cuda.FloatTensor)
198- )
193+ temp = target[i].requires_grad_(False).contiguous().type(torch.cuda.FloatTensor)
199194 var.append(temp)
200195 else:
201196 for i in range(num):
202- temp = Variable(target[i]).contiguous().cuda().type(torch.cuda.FloatTensor)
197+ temp = target[i].contiguous().cuda().type(torch.cuda.FloatTensor)
203198 var.append(temp)
204199
205200 return var
You can’t perform that action at this time.
0 commit comments