Commit 37b5811

Removing cupy as a dependency

tovacinni committed Jul 28, 2021
1 parent dc6c1f4 commit 37b5811
Showing 4 changed files with 13 additions and 12 deletions.
3 changes: 1 addition & 2 deletions  infra/requirements.txt

@@ -2,7 +2,6 @@ torch==1.6.0
 torchvision==0.7.0
 tensorboard
 matplotlib
-cupy-cuda111
 git+https://github.com/tinyobjloader/tinyobjloader.git#subdirectory=python
 pybind11
 trimesh>=3.0
@@ -14,4 +13,4 @@ six==1.12.0
 moviepy
 opencv-python
 plyfile
-polyscope
+polyscope
1 change: 0 additions & 1 deletion  sdf-net/lib/models/OctreeSDF.py

@@ -22,7 +22,6 @@
 import math

 import numpy as np
-import cupy as xp

 import torch
 import torch.nn as nn
1 change: 0 additions & 1 deletion  sdf-net/lib/trainer.py

@@ -28,7 +28,6 @@
 import pprint
 import logging as log

-import cupy
 import matplotlib.pyplot
 from PIL import Image
 import numpy as np
20 changes: 12 additions & 8 deletions  sdf-net/lib/utils.py

@@ -22,10 +22,10 @@
 import os
 import sys
 import time
-import cupy as cp
 import numpy as np
 import pprint
 import argparse
+import torch

 # General utilities

@@ -86,24 +86,28 @@ def colorize_time(elapsed):
 class PerfTimer():
     def __init__(self, activate=False):
         self.prev_time = time.process_time()
-        self.stream = cp.cuda.Stream.null
-        self.prev_time_gpu = self.stream.record()
+        self.start = torch.cuda.Event(enable_timing=True)
+        self.end = torch.cuda.Event(enable_timing=True)
+        self.prev_time_gpu = self.start.record()
         self.counter = 0
         self.activate = activate

     def reset(self):
         self.counter = 0
         self.prev_time = time.process_time()
-        self.stream = cp.cuda.Stream.null
+        self.start = torch.cuda.Event(enable_timing=True)
+        self.end = torch.cuda.Event(enable_timing=True)
+        self.prev_time_gpu = self.start.record()

     def check(self, name=None):
         if self.activate:
             cpu_time = time.process_time() - self.prev_time
             cpu_time = colorize_time(cpu_time)

-            end = self.stream.record()
-            end.synchronize()
-            gpu_time = cp.cuda.get_elapsed_time(self.prev_time_gpu, end) / 1000
+            self.end.record()
+            torch.cuda.synchronize()
+
+            gpu_time = self.start.elapsed_time(self.end) / 1e3
             gpu_time = colorize_time(gpu_time)
             if name:
                 print("CPU Checkpoint {}: {} s".format(name, cpu_time))
@@ -113,7 +117,7 @@ def check(self, name=None):
                 print("GPU Checkpoint {}: {} s".format(self.counter, gpu_time))

             self.prev_time = time.process_time()
-            self.prev_time_gpu = self.stream.record()
+            self.prev_time_gpu = self.start.record()
             self.counter += 1
             return cpu_time, gpu_time
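For context on the replacement pattern above: a minimal sketch of the torch.cuda.Event timing idiom that the reworked PerfTimer relies on, assuming a CUDA-capable device is available. The matrix-multiply workload and variable names are illustrative only; elapsed_time() reports milliseconds, hence the division by 1e3 to get the seconds printed by check().

import torch

# Sketch of CUDA event timing with PyTorch (assumes a CUDA device is present).
start = torch.cuda.Event(enable_timing=True)
end = torch.cuda.Event(enable_timing=True)

start.record()                                # mark start on the current CUDA stream
x = torch.randn(4096, 4096, device='cuda')    # illustrative GPU workload
y = x @ x
end.record()                                  # mark end on the current CUDA stream

torch.cuda.synchronize()                      # wait until both events have completed
gpu_time = start.elapsed_time(end) / 1e3      # elapsed_time() is in milliseconds
print("GPU time: {:.4f} s".format(gpu_time))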
