main.py
from nudenet import NudeDetector
from better_profanity import profanity
import cv2
import easyocr
import matplotlib.pyplot as plt
import numpy as np

# Load the default profanity word list and the NudeNet detection model.
profanity.load_censor_words()
classifier = NudeDetector()

image_path = ''  # path of the image to check; set this before running
isAdultContent = False
isNudeContent = False

# Run nudity detection; each detection carries a 'class' label and a confidence 'score'.
prop = classifier.detect(image_path)
print(prop)
def check_Nude_Content(detections, threshold=0.1):
    """Return True if any exposed body part is detected above the score threshold."""
    exposed_parts = [
        "FEMALE_BREAST_EXPOSED",
        "MALE_GENITALIA_EXPOSED",
        "FEMALE_GENITALIA_EXPOSED",
        "BUTTOCKS_EXPOSED",
        "ANUS_EXPOSED",
        "FEET_EXPOSED",
    ]
    for detection in detections:
        if detection['class'] in exposed_parts and detection['score'] > threshold:
            return True
    return False
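
# Illustrative only (an assumption, not output from this repo): classifier.detect()
# is expected to return a list of dicts shaped like the example below, which is the
# structure check_Nude_Content() relies on. _example_detections is a hypothetical value.
# _example_detections = [
#     {'class': 'FACE_FEMALE', 'score': 0.92, 'box': [10, 20, 120, 140]},
#     {'class': 'FEET_EXPOSED', 'score': 0.35, 'box': [200, 400, 80, 60]},
# ]
# check_Nude_Content(_example_detections)       # -> True  (FEET_EXPOSED scores above 0.1)
# check_Nude_Content(_example_detections, 0.5)  # -> False (no exposed class above 0.5)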
def check_Adult_Content(image_path):
    """Return True if any text detected in the image contains profanity."""
    img = cv2.imread(image_path)
    # Instantiate the EasyOCR text detector (English, CPU only).
    reader = easyocr.Reader(['en'], gpu=False)
    # Detect text in the image; each result is (bbox, text, confidence).
    text_ = reader.readtext(img)
    threshold = 0.25  # OCR confidence threshold (reserved for filtering/visualization; unused here)
    for bbox, text, score in text_:
        print(text)
        if profanity.contains_profanity(text):
            return True
    return False
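
# Optional visualization sketch (an assumption: this appears to be what the otherwise
# unused matplotlib/numpy imports and the "draw bbox and text" comment were intended
# for). draw_text_boxes() is a hypothetical helper, not part of the original script;
# it is only defined here and never called automatically.
def draw_text_boxes(image_path, min_confidence=0.25):
    """Draw EasyOCR bounding boxes and recognized text on an image and display it."""
    img = cv2.imread(image_path)
    reader = easyocr.Reader(['en'], gpu=False)
    for bbox, text, score in reader.readtext(img):
        if score < min_confidence:
            continue  # skip low-confidence detections
        pts = np.array(bbox, dtype=np.int32).reshape((-1, 1, 2))
        cv2.polylines(img, [pts], isClosed=True, color=(0, 255, 0), thickness=2)
        x, y = bbox[0]
        cv2.putText(img, text, (int(x), int(y) - 5),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 255, 0), 2)
    plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
    plt.axis('off')
    plt.show()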

# Evaluate the target image with both checks.
isNudeContent = check_Nude_Content(prop)
print("isNudeContent:", isNudeContent)
isAdultContent = check_Adult_Content(image_path)
print("isAdultContent:", isAdultContent)

# Scratch notes kept for reference:
# print(profanity.contains_profanity("Have a merry day! :)"))
#
# The commented-out snippet below uses classify(), which belongs to the older
# NudeClassifier API (safe/unsafe probabilities) rather than NudeDetector.
# batch_size is optional; defaults to 4
# prop = classifier.classify(['path_to_image_1', 'path_to_image_2'], batch_size=BATCH_SIZE)
# print(prop)
# Returns {'path_to_image_1': {'safe': PROBABILITY, 'unsafe': PROBABILITY},
#          'path_to_image_2': {'safe': PROBABILITY, 'unsafe': PROBABILITY}}
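
# A minimal batch-processing sketch (not in the original script), assuming the current
# NudeDetector API where detect() is called once per image; check_images() is a
# hypothetical helper shown here instead of the older classify() call above.
def check_images(image_paths):
    """Return {path: {'nude': bool, 'adult_text': bool}} for each image path."""
    results = {}
    for path in image_paths:
        detections = classifier.detect(path)
        results[path] = {
            'nude': check_Nude_Content(detections),
            'adult_text': check_Adult_Content(path),
        }
    return results
# Example: check_images(['path_to_image_1', 'path_to_image_2'])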