
Commit

finish NSFW
lucasxlu committed Jan 28, 2019
1 parent 6558874 commit cca372f
Showing 5 changed files with 58 additions and 13 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -7,3 +7,4 @@ dm/__pycache__/
nlp/__pycache__/
__pycache__/
pyWeb/
research/imgcensor/model/
Binary file added research/imgcensor/1.jpg
Binary file added research/imgcensor/2.jpg
40 changes: 40 additions & 0 deletions research/imgcensor/README.md
@@ -13,4 +13,44 @@ Note: The dataset for pornography recognition is downloaded from [nsfw_data_scra
| porn | 110003 |
| sexy | 18299 |

## Performance
| Model | Accuracy | Precision | Recall |
| :---: |:---: |:---: |:---: |
| DenseNet121 | 93.31% | 90.68% | 89.72% |
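
For reference, a minimal sketch of how metrics like these could be computed on a held-out set with scikit-learn; `y_true` and `y_pred` are hypothetical arrays of class indices, and macro averaging over the five classes is an assumption (the table does not state the averaging scheme):

```python
# Hypothetical sketch, not part of this repo: scoring held-out predictions with scikit-learn.
from sklearn.metrics import accuracy_score, precision_score, recall_score

y_true = [2, 3, 4, 0, 1, 3]  # ground-truth class indices (placeholder data)
y_pred = [2, 3, 4, 0, 1, 4]  # model predictions (placeholder data)

print('Accuracy : %.2f%%' % (100 * accuracy_score(y_true, y_pred)))
print('Precision: %.2f%%' % (100 * precision_score(y_true, y_pred, average='macro')))
print('Recall   : %.2f%%' % (100 * recall_score(y_true, y_pred, average='macro')))
```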


## Usage
1. Download the pretrained model from [Google Drive]() and place it under `./model/` (the script loads `./model/DenseNet121_NSFW.pth`)
2. Run [inference](./inference.py) on your own image
3. The model returns JSON results such as the examples below (a minimal calling sketch follows the examples):

* Example 1
![1](./1.jpg)

```json
{"message": "success",
"results": [{"prob": 1.0, "type": "sexy"},
{"prob": 0.0, "type": "neutral"},
{"prob": 0.0, "type": "porn"},
{"prob": 0.0, "type": "hentai"},
{"prob": 0.0, "type": "drawings"}],
"status": 0}
```

Note: the returned result indicates that ```sexy``` has the highest
probability (prob=1.0)

* Example 2
![2](./2.jpg)
```json
{"message": "success",
"results": [{"prob": 0.6981, "type": "neutral"},
{"prob": 0.1811, "type": "porn"},
{"prob": 0.1205, "type": "sexy"},
{"prob": 0.0002, "type": "hentai"},
{"prob": 0.0002, "type": "drawings"}],
"status": 0}
```

Note: the returned result indicates that ```neutral``` has the highest
probability (prob=0.6981)
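
A minimal calling sketch for step 2, assuming the repository root is on `PYTHONPATH` and the downloaded weights sit at `./model/DenseNet121_NSFW.pth` (the path used in the script's `__main__` block):

```python
# Sketch only: instantiate the estimator and pick the top-scoring type from the returned dict.
from research.imgcensor.inference import NSFWEstimator

nsfw = NSFWEstimator('./model/DenseNet121_NSFW.pth')
result = nsfw.infer('./1.jpg')  # any local image path works here

top = max(result['results'], key=lambda r: r['prob'])
print(top['type'], top['prob'])
```

The `results` list contains the top-5 classes with their probabilities, so the entry with the highest `prob` is the predicted label.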
30 changes: 17 additions & 13 deletions research/imgcensor/inference.py
@@ -11,7 +11,7 @@
from torchvision import models

sys.path.append('../')
from research.imgcensor import cfg
from research.imgcensor.cfg import cfg


class NSFWEstimator:
@@ -20,14 +20,12 @@ class NSFWEstimator:
"""

def __init__(self, pretrained_model_path):
model = models.resnet18(pretrained=True)
num_ftrs = model.fc.in_features
model.fc = nn.Linear(num_ftrs, cfg['out_num'])
model = models.densenet121(pretrained=True)
num_ftrs = model.classifier.in_features
model.classifier = nn.Linear(num_ftrs, cfg['out_num'])

model = model.float()
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')

# model = nn.DataParallel(model)
model.load_state_dict(torch.load(pretrained_model_path))

# if torch.cuda.device_count() > 1:
@@ -50,6 +48,13 @@ def __init__(self, pretrained_model_path):
self.device = device
self.model = model
self.topK = 5
self.mapping = {
0: 'drawings',
1: 'hentai',
2: 'neutral',
3: 'porn',
4: 'sexy'
}

def infer(self, img_file):
img = Image.open(img_file)
@@ -69,24 +74,23 @@ def infer(self, img_file):
outputs = self.model(img)
outputs = F.softmax(outputs, dim=1)

# get TOP-K output labels and corresponding probabilities
topK_prob, topK_label = torch.topk(outputs, self.topK)
prob = topK_prob.to("cpu").detach().numpy().tolist()

_, predicted = torch.max(outputs.data, 1)

return {
'status': 0,
'message': 'success',
'results': [
"status": 0,
"message": "success",
"results": [
{
'age': None,
'prob': round(prob[0][i], 4)
"prob": round(prob[0][i], 4),
"type": self.mapping[int(topK_label[0][i].to("cpu"))],
} for i in range(self.topK)
]
}


if __name__ == '__main__':
nsfw = NSFWEstimator('./model/DenseNet121_NSFW.pth')
pprint(nsfw.infer('./1.jpg'))
pprint(nsfw.infer('./3.jpg'))
