-
Notifications
You must be signed in to change notification settings - Fork 22
/
Copy pathviews.py
147 lines (108 loc) · 3.26 KB
/
views.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
import time
import json
from collections import OrderedDict
import jieba
import jieba.analyse
from django.http import HttpResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
USER_DICT = './userdict.txt'
STOPWORDS_FILE = './stopwords.txt'
# Create your views here.
def welcome(request):
    """
    Landing page of the NLP web service.

    :param request: incoming HTTP request
    :return: rendered welcome page
    """
    template_name = 'welcome.html'
    return render(request, template_name)
def word_seg(request):
    """
    Extract top keywords from a sentence with their TF-IDF weights.

    @Note: supported by jieba's TF-IDF keyword extraction (extract_tags),
    which both segments and ranks — not plain segmentation, despite the name.
    :param request: GET request carrying a ``sentence`` parameter
    :return: JSON HttpResponse:
        ``code`` 0 on success / 1 on missing input, ``msg``,
        ``data`` -> {'words': [...], 'tfidf': [...]} as parallel lists
    """
    sentence = request.GET.get('sentence')
    result = OrderedDict()
    if sentence is None:
        result['code'] = 1
        result['msg'] = 'Invalid Sentence Input'
        result['data'] = None
    else:
        result['code'] = 0
        result['msg'] = 'success'
        # jieba.load_userdict(USER_DICT)
        # jieba.analyse.set_stop_words(STOPWORDS_FILE)
        tags = jieba.analyse.extract_tags(sentence, topK=30, withWeight=True)
        # Unzip the (word, weight) pairs into two parallel lists for the client;
        # zip(*[]) would yield nothing, so guard the empty case explicitly.
        if tags:
            seg_words, tfidf = (list(col) for col in zip(*tags))
        else:
            seg_words, tfidf = [], []
        result['data'] = {
            'words': seg_words,
            'tfidf': tfidf
        }
    # ensure_ascii=False keeps Chinese characters readable; declare the JSON
    # MIME type explicitly (a bare HttpResponse defaults to text/html).
    json_result = json.dumps(result, ensure_ascii=False)
    return HttpResponse(json_result, content_type='application/json; charset=utf-8')
@csrf_exempt
def sentiment(request):
    """
    Classify the sentiment of a sentence as positive / negative / neutral.

    @Note: currently supported by snownlp, but will be replaced customized
    model in the near future!
    :param request: GET request carrying a ``sentence`` parameter
    :return: JSON HttpResponse with ``code``/``msg``, ``data`` holding the
        bucketed ``type`` and the raw ``score`` in [0, 1], plus ``elapse``
        (wall-clock seconds spent handling the request)
    """
    sentence = request.GET.get('sentence')
    result = OrderedDict()
    tik = time.time()
    if sentence is None:
        result['code'] = 1
        result['msg'] = 'Invalid Sentence Input'
        result['data'] = None
    else:
        result['code'] = 0
        result['msg'] = 'success'
        # Lazy import so the snownlp model is only loaded when this
        # endpoint is actually hit, not at module import time.
        from snownlp import SnowNLP
        senti_score = SnowNLP(sentence).sentiments
        # Bucket the raw probability; the asymmetric cut-offs (>= 0.65
        # positive, <= 0.4 negative) leave a neutral band in between.
        if senti_score >= 0.65:
            tp = 'positive'
        elif senti_score <= 0.4:
            tp = 'negative'
        else:
            tp = 'neutral'
        result['data'] = {
            'type': tp,
            'score': senti_score
        }
    result['elapse'] = time.time() - tik
    # Declare the JSON MIME type explicitly (default would be text/html).
    json_result = json.dumps(result, ensure_ascii=False)
    return HttpResponse(json_result, content_type='application/json; charset=utf-8')
def sentiment_view(request):
    """
    Render the sentiment-analysis demo page.

    :param request: incoming HTTP request
    :return: rendered sentiment page
    """
    template_name = 'sentiment.html'
    return render(request, template_name)
def hotwords(request):
    """
    Rank hot words of a sentence for word-cloud rendering.

    @Note: currently supported by jieba's TextRank, but will be replaced
    customized model in the near future!
    :param request: GET request carrying a ``sentence`` parameter
    :return: JSON HttpResponse with ``code``/``msg``, ``data`` as a list of
        [word, weight] pairs (top 30 by TextRank), plus ``elapse``
        (wall-clock seconds spent handling the request)
    """
    sentence = request.GET.get('sentence')
    result = OrderedDict()
    tik = time.time()
    if sentence is None:
        result['code'] = 1
        result['msg'] = 'Invalid Sentence Input'
        result['data'] = None
    else:
        result['code'] = 0
        result['msg'] = 'success'
        # TextRank is graph-based and needs no IDF corpus, unlike the
        # TF-IDF extraction used by word_seg above.
        words_and_weights = jieba.analyse.textrank(sentence, topK=30, withWeight=True)
        result['data'] = words_and_weights
    result['elapse'] = time.time() - tik
    # Declare the JSON MIME type explicitly (default would be text/html).
    json_result = json.dumps(result, ensure_ascii=False)
    return HttpResponse(json_result, content_type='application/json; charset=utf-8')
def hotwords_view(request):
    """
    Render the hot-words / word-cloud demo page.

    :param request: incoming HTTP request
    :return: rendered hotwords page
    """
    template_name = 'hotwords.html'
    return render(request, template_name)