diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3f2c04d --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +.idea/ +bin/ +core/__pycache__/ +__pycache__/ \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..8b7544e --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016 Zheng Zibin + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ModuleTest.py b/ModuleTest.py new file mode 100644 index 0000000..548dd93 --- /dev/null +++ b/ModuleTest.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# @Time : 2017/9/2 13:40 +# @Author : 郑梓斌 + +import core + +if __name__ == '__main__': + core.face_merge(src_img='images/model.jpg', + dst_img='images/20171030175254.jpg', + out_img='images/output.jpg', + face_area=[50, 30, 500, 485], + alpha=0.75, + blur_detail_x=15, + blur_detail_y=10, + mat_multiple=0.95) diff --git a/README.md b/README.md new file mode 100644 index 0000000..6e2e05b --- /dev/null +++ b/README.md @@ -0,0 +1,10 @@ +# 颜如玉 +yry(颜如玉)—— 一个实现人脸融合的算法,可以接近腾讯天天P图疯狂变脸功能的效果 + +# 效果 +国际惯例先放效果对照图,左边为天天p图融合效果,右边为颜如玉融合效果: +![](http://curzbin.oss-cn-shenzhen.aliyuncs.com/compare.jpg) + +# 使用 +python 安装 requirements.txt 依赖后运行 ModuleTest.py + diff --git a/core/__init__.py b/core/__init__.py new file mode 100644 index 0000000..9730dcc --- /dev/null +++ b/core/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# @Time : 2017/9/2 13:40 +# @Author : 郑梓斌 + +from .recognizer import face_points, \ + FACE_POINTS, \ + JAW_END, \ + LEFT_EYE_POINTS, \ + RIGHT_EYE_POINTS, \ + FACE_END, \ + JAW_POINTS, \ + OVERLAY_POINTS, \ + matrix_rectangle +from .triangulation import measure_triangle, affine_triangle, morph_triangle +from .morpher import face_merge diff --git a/core/morpher.py b/core/morpher.py new file mode 100644 index 0000000..1924b63 --- /dev/null +++ b/core/morpher.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +# @Time : 2017/9/2 13:40 +# @Author : 郑梓斌 + +import cv2 +import numpy as np +import time +import os + +import core + + +def transformation_points(src_img, src_points, dst_img, dst_points): + src_points = src_points.astype(np.float64) + dst_points = dst_points.astype(np.float64) + + c1 = np.mean(src_points, axis=0) + c2 = np.mean(dst_points, axis=0) + + src_points -= c1 + dst_points -= c2 + + s1 = np.std(src_points) + s2 = np.std(dst_points) + + src_points /= s1 + dst_points /= s2 + + u, s, vt = 
np.linalg.svd(src_points.T * dst_points) + r = (u * vt).T + + m = np.vstack([np.hstack(((s2 / s1) * r, c2.T - (s2 / s1) * r * c1.T)), np.matrix([0., 0., 1.])]) + + output = cv2.warpAffine(dst_img, m[:2], + (src_img.shape[1], src_img.shape[0]), + borderMode=cv2.BORDER_TRANSPARENT, + flags=cv2.WARP_INVERSE_MAP) + + return output + + +def tran_matrix(src_img, src_points, dst_img, dst_points): + h = cv2.findHomography(dst_points, src_points) + output = cv2.warpAffine(dst_img, h[0][:2], (src_img.shape[1], src_img.shape[0]), + borderMode=cv2.BORDER_TRANSPARENT, + flags=cv2.WARP_INVERSE_MAP) + + return output + + +def correct_color(img1, img2, landmark): + blur_amount = 0.4 * np.linalg.norm( + np.mean(landmark[core.LEFT_EYE_POINTS], axis=0) + - np.mean(landmark[core.RIGHT_EYE_POINTS], axis=0) + ) + blur_amount = int(blur_amount) + + if blur_amount % 2 == 0: + blur_amount += 1 + + img1_blur = cv2.GaussianBlur(img1, (blur_amount, blur_amount), 0) + img2_blur = cv2.GaussianBlur(img2, (blur_amount, blur_amount), 0) + + img2_blur += (128 * (img2_blur <= 1.0)).astype(img2_blur.dtype) + + return img2.astype(np.float64) * img1_blur.astype(np.float64) / img2_blur.astype(np.float64) + + +def tran_src(src_img, src_points, dst_points, face_area=None): + jaw = core.JAW_END + + dst_list = dst_points \ + + core.matrix_rectangle(face_area[0], face_area[1], face_area[2], face_area[3]) \ + + core.matrix_rectangle(0, 0, src_img.shape[1], src_img.shape[0]) + + src_list = src_points \ + + core.matrix_rectangle(face_area[0], face_area[1], face_area[2], face_area[3]) \ + + core.matrix_rectangle(0, 0, src_img.shape[1], src_img.shape[0]) + + jaw_points = [] + + for i in range(0, jaw): + jaw_points.append(dst_list[i]) + jaw_points.append(src_list[i]) + + warp_jaw = cv2.convexHull(np.array(jaw_points), returnPoints=False) + warp_jaw = warp_jaw.tolist() + + for i in range(0, len(warp_jaw)): + warp_jaw[i] = warp_jaw[i][0] + + warp_jaw.sort() + + if len(warp_jaw) <= jaw: + dst_list = dst_list[jaw - 
len(warp_jaw):] + src_list = src_list[jaw - len(warp_jaw):] + for i in range(0, len(warp_jaw)): + dst_list[i] = jaw_points[int(warp_jaw[i])] + src_list[i] = jaw_points[int(warp_jaw[i])] + else: + for i in range(0, jaw): + if len(warp_jaw) > jaw and warp_jaw[i] == 2 * i and warp_jaw[i + 1] == 2 * i + 1: + warp_jaw.remove(2 * i) + + dst_list[i] = jaw_points[int(warp_jaw[i])] + + dt = core.measure_triangle(src_img, dst_list) + + res_img = np.zeros(src_img.shape, dtype=src_img.dtype) + + for i in range(0, len(dt)): + t_src = [] + t_dst = [] + + for j in range(0, 3): + t_src.append(src_list[dt[i][j]]) + t_dst.append(dst_list[dt[i][j]]) + + core.affine_triangle(src_img, res_img, t_src, t_dst) + + return res_img + + +def merge_img(src_img, dst_img, dst_matrix, dst_points, blur_detail_x=None, blur_detail_y=None, mat_multiple=None): + face_mask = np.zeros(src_img.shape, dtype=src_img.dtype) + + for group in core.OVERLAY_POINTS: + cv2.fillConvexPoly(face_mask, cv2.convexHull(dst_matrix[group]), (255, 255, 255)) + + r = cv2.boundingRect(np.float32([dst_points[:core.FACE_END]])) + + center = (r[0] + int(r[2] / 2), r[1] + int(r[3] / 2)) + + if mat_multiple: + mat = cv2.getRotationMatrix2D(center, 0, mat_multiple) + face_mask = cv2.warpAffine(face_mask, mat, (face_mask.shape[1], face_mask.shape[0])) + + if blur_detail_x and blur_detail_y: + face_mask = cv2.blur(face_mask, (blur_detail_x, blur_detail_y), center) + + return cv2.seamlessClone(np.uint8(dst_img), src_img, face_mask, center, cv2.NORMAL_CLONE) + + +def morph_img(src_img, src_points, dst_img, dst_points, alpha=0.5): + morph_points = [] + + src_img = src_img.astype(np.float32) + dst_img = dst_img.astype(np.float32) + + res_img = np.zeros(src_img.shape, src_img.dtype) + + for i in range(0, len(src_points)): + x = (1 - alpha) * src_points[i][0] + alpha * dst_points[i][0] + y = (1 - alpha) * src_points[i][1] + alpha * dst_points[i][1] + morph_points.append((x, y)) + + dt = core.measure_triangle(src_img, morph_points) + + 
for i in range(0, len(dt)): + t1 = [] + t2 = [] + t = [] + + for j in range(0, 3): + t1.append(src_points[dt[i][j]]) + t2.append(dst_points[dt[i][j]]) + t.append(morph_points[dt[i][j]]) + + core.morph_triangle(src_img, dst_img, res_img, t1, t2, t, alpha) + + return res_img + + +def face_merge(dst_img, src_img, out_img, + face_area, alpha=0.75, + skin_buff=0, skin_detail=0, skin_p=0, + blur_detail_x=None, blur_detail_y=None, mat_multiple=None): + src_matrix, src_points, err = core.face_points(src_img) + dst_matrix, dst_points, err = core.face_points(dst_img) + + src_img = cv2.imread(src_img, cv2.IMREAD_COLOR) + dst_img = cv2.imread(dst_img, cv2.IMREAD_COLOR) + + dst_img = transformation_points(src_img, src_matrix[core.FACE_POINTS], + dst_img, dst_matrix[core.FACE_POINTS]) + + trans_file = 'images/' + str(int(time.time() * 1000)) + '.jpg' + cv2.imwrite(trans_file, dst_img) + _, dst_points, err = core.face_points(trans_file) + + dst_img = morph_img(src_img, src_points, dst_img, dst_points, alpha) + + morph_file = 'images/' + str(int(time.time() * 1000)) + '.jpg' + cv2.imwrite(morph_file, dst_img) + dst_matrix, dst_points, err = core.face_points(morph_file) + + src_img = tran_src(src_img, src_points, dst_points, face_area) + + dst_img = merge_img(src_img, dst_img, dst_matrix, dst_points, blur_detail_x, blur_detail_y, mat_multiple) + + os.remove(trans_file) + os.remove(trans_file + '.txt') + + os.remove(morph_file) + os.remove(morph_file + '.txt') + + cv2.imwrite(out_img, dst_img) + + return err diff --git a/core/recognizer.py b/core/recognizer.py new file mode 100644 index 0000000..820acb8 --- /dev/null +++ b/core/recognizer.py @@ -0,0 +1,188 @@ +# -*- coding: utf-8 -*- +# @Time : 2017/9/2 13:40 +# @Author : 郑梓斌 + +import json +import os + +import requests +import numpy as np + +FACE_POINTS = list(range(0, 83)) +JAW_POINTS = list(range(0, 19)) +LEFT_EYE_POINTS = list(range(19, 29)) +LEFT_BROW_POINTS = list(range(29, 37)) +MOUTH_POINTS = list(range(37, 55)) +NOSE_POINTS 
= list(range(55, 65)) +RIGHT_EYE_POINTS = list(range(65, 75)) +RIGHT_BROW_POINTS = list(range(75, 83)) + +LEFT_FACE = list(range(0, 10)) + list(range(29, 34)) +RIGHT_FACE = list(range(9, 19)) + list(range(75, 80)) + +JAW_END = 19 +FACE_START = 0 +FACE_END = 83 + +OVERLAY_POINTS = [ + LEFT_FACE, + RIGHT_FACE, + JAW_POINTS, +] + + +def face_points(image): + points = [] + txt = image + '.txt' + + if os.path.isfile(txt): + with open(txt) as file: + for line in file: + points = line + elif os.path.isfile(image): + points = landmarks_by_face__(image) + with open(txt, 'w') as file: + file.write(str(points)) + + faces = json.loads(points)['faces'] + + if len(faces) == 0: + err = 404 + else: + err = 0 + + matrix_list = np.matrix(matrix_marks(faces[0]['landmark'])) + + point_list = [] + for p in matrix_list.tolist(): + point_list.append((int(p[0]), int(p[1]))) + + return matrix_list, point_list, err + + +def landmarks_by_face__(image): + url = 'https://api-cn.faceplusplus.com/facepp/v3/detect' + params = { + 'api_key': 'mezAN9ZYrQ_BQRSN0dWi68-4O_IlkGvC', + 'api_secret': 'yhsXkJ1XbwEX_XJrND11T0z2bDRW4BlN', + 'return_landmark': 1, + } + file = {'image_file': open(image, 'rb')} + + r = requests.post(url=url, files=file, data=params) + + if r.status_code == requests.codes.ok: + return r.content.decode('utf-8') + else: + return r.content + + +def matrix_rectangle(left, top, width, height): + pointer = [ + (left, top), + (left + width / 2, top), + (left + width - 1, top), + (left + width - 1, top + height / 2), + (left, top + height / 2), + (left, top + height - 1), + (left + width / 2, top + height - 1), + (left + width - 1, top + height - 1) + ] + + return pointer + + +def matrix_marks(res): + pointer = [ + [res['contour_left1']['x'], res['contour_left1']['y']], + [res['contour_left2']['x'], res['contour_left2']['y']], + [res['contour_left3']['x'], res['contour_left3']['y']], + [res['contour_left4']['x'], res['contour_left4']['y']], + [res['contour_left5']['x'], 
res['contour_left5']['y']], + [res['contour_left6']['x'], res['contour_left6']['y']], + [res['contour_left7']['x'], res['contour_left7']['y']], + [res['contour_left8']['x'], res['contour_left8']['y']], + [res['contour_left9']['x'], res['contour_left9']['y']], + [res['contour_chin']['x'], res['contour_chin']['y']], + [res['contour_right9']['x'], res['contour_right9']['y']], + [res['contour_right8']['x'], res['contour_right8']['y']], + [res['contour_right7']['x'], res['contour_right7']['y']], + [res['contour_right6']['x'], res['contour_right6']['y']], + [res['contour_right5']['x'], res['contour_right5']['y']], + [res['contour_right4']['x'], res['contour_right4']['y']], + [res['contour_right3']['x'], res['contour_right3']['y']], + [res['contour_right2']['x'], res['contour_right2']['y']], + [res['contour_right1']['x'], res['contour_right1']['y']], + + [res['left_eye_bottom']['x'], res['left_eye_bottom']['y']], + [res['left_eye_center']['x'], res['left_eye_center']['y']], + [res['left_eye_left_corner']['x'], res['left_eye_left_corner']['y']], + [res['left_eye_lower_left_quarter']['x'], res['left_eye_lower_left_quarter']['y']], + [res['left_eye_lower_right_quarter']['x'], res['left_eye_lower_right_quarter']['y']], + [res['left_eye_pupil']['x'], res['left_eye_pupil']['y']], + [res['left_eye_right_corner']['x'], res['left_eye_right_corner']['y']], + [res['left_eye_top']['x'], res['left_eye_top']['y']], + [res['left_eye_upper_left_quarter']['x'], res['left_eye_upper_left_quarter']['y']], + [res['left_eye_upper_right_quarter']['x'], res['left_eye_upper_right_quarter']['y']], + + [res['left_eyebrow_left_corner']['x'], res['left_eyebrow_left_corner']['y']], + [res['left_eyebrow_upper_left_quarter']['x'], res['left_eyebrow_upper_left_quarter']['y']], + [res['left_eyebrow_upper_middle']['x'], res['left_eyebrow_upper_middle']['y']], + [res['left_eyebrow_upper_right_quarter']['x'], res['left_eyebrow_upper_right_quarter']['y']], + [res['left_eyebrow_right_corner']['x'], 
res['left_eyebrow_right_corner']['y']], + [res['left_eyebrow_lower_left_quarter']['x'], res['left_eyebrow_lower_left_quarter']['y']], + [res['left_eyebrow_lower_middle']['x'], res['left_eyebrow_lower_middle']['y']], + [res['left_eyebrow_lower_right_quarter']['x'], res['left_eyebrow_lower_right_quarter']['y']], + + [res['mouth_left_corner']['x'], res['mouth_left_corner']['y']], + [res['mouth_lower_lip_bottom']['x'], res['mouth_lower_lip_bottom']['y']], + [res['mouth_lower_lip_left_contour1']['x'], res['mouth_lower_lip_left_contour1']['y']], + [res['mouth_lower_lip_left_contour2']['x'], res['mouth_lower_lip_left_contour2']['y']], + [res['mouth_lower_lip_left_contour3']['x'], res['mouth_lower_lip_left_contour3']['y']], + [res['mouth_lower_lip_right_contour1']['x'], res['mouth_lower_lip_right_contour1']['y']], + [res['mouth_lower_lip_right_contour2']['x'], res['mouth_lower_lip_right_contour2']['y']], + [res['mouth_lower_lip_right_contour3']['x'], res['mouth_lower_lip_right_contour3']['y']], + [res['mouth_lower_lip_top']['x'], res['mouth_lower_lip_top']['y']], + [res['mouth_right_corner']['x'], res['mouth_right_corner']['y']], + [res['mouth_upper_lip_bottom']['x'], res['mouth_upper_lip_bottom']['y']], + [res['mouth_upper_lip_left_contour1']['x'], res['mouth_upper_lip_left_contour1']['y']], + [res['mouth_upper_lip_left_contour2']['x'], res['mouth_upper_lip_left_contour2']['y']], + [res['mouth_upper_lip_left_contour3']['x'], res['mouth_upper_lip_left_contour3']['y']], + [res['mouth_upper_lip_right_contour1']['x'], res['mouth_upper_lip_right_contour1']['y']], + [res['mouth_upper_lip_right_contour2']['x'], res['mouth_upper_lip_right_contour2']['y']], + [res['mouth_upper_lip_right_contour3']['x'], res['mouth_upper_lip_right_contour3']['y']], + [res['mouth_upper_lip_top']['x'], res['mouth_upper_lip_top']['y']], + + [res['nose_contour_left1']['x'], res['nose_contour_left1']['y']], + [res['nose_contour_left2']['x'], res['nose_contour_left2']['y']], + 
[res['nose_contour_left3']['x'], res['nose_contour_left3']['y']], + [res['nose_contour_lower_middle']['x'], res['nose_contour_lower_middle']['y']], + [res['nose_contour_right1']['x'], res['nose_contour_right1']['y']], + [res['nose_contour_right2']['x'], res['nose_contour_right2']['y']], + [res['nose_contour_right3']['x'], res['nose_contour_right3']['y']], + [res['nose_left']['x'], res['nose_left']['y']], + [res['nose_right']['x'], res['nose_right']['y']], + [res['nose_tip']['x'], res['nose_tip']['y']], + + [res['right_eye_bottom']['x'], res['right_eye_bottom']['y']], + [res['right_eye_center']['x'], res['right_eye_center']['y']], + [res['right_eye_left_corner']['x'], res['right_eye_left_corner']['y']], + [res['right_eye_lower_left_quarter']['x'], res['right_eye_lower_left_quarter']['y']], + [res['right_eye_lower_right_quarter']['x'], res['right_eye_lower_right_quarter']['y']], + [res['right_eye_pupil']['x'], res['right_eye_pupil']['y']], + [res['right_eye_right_corner']['x'], res['right_eye_right_corner']['y']], + [res['right_eye_top']['x'], res['right_eye_top']['y']], + [res['right_eye_upper_left_quarter']['x'], res['right_eye_upper_left_quarter']['y']], + [res['right_eye_upper_right_quarter']['x'], res['right_eye_upper_right_quarter']['y']], + + [res['right_eyebrow_left_corner']['x'], res['right_eyebrow_left_corner']['y']], + [res['right_eyebrow_upper_left_quarter']['x'], res['right_eyebrow_upper_left_quarter']['y']], + [res['right_eyebrow_upper_middle']['x'], res['right_eyebrow_upper_middle']['y']], + [res['right_eyebrow_upper_right_quarter']['x'], res['right_eyebrow_upper_right_quarter']['y']], + [res['right_eyebrow_right_corner']['x'], res['right_eyebrow_right_corner']['y']], + [res['right_eyebrow_lower_left_quarter']['x'], res['right_eyebrow_lower_left_quarter']['y']], + [res['right_eyebrow_lower_middle']['x'], res['right_eyebrow_lower_middle']['y']], + [res['right_eyebrow_lower_right_quarter']['x'], res['right_eyebrow_lower_right_quarter']['y']], + ] + + 
return pointer diff --git a/core/triangulation.py b/core/triangulation.py new file mode 100644 index 0000000..cb46a0a --- /dev/null +++ b/core/triangulation.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# @Time : 2017/9/2 13:40 +# @Author : 郑梓斌 + +import cv2 +import numpy as np + + +def draw_point(img, p, color): + cv2.circle(img, (p[0], p[1]), 2, color, cv2.FILLED, cv2.LINE_AA, 0) + + +def rect_contains(rect, point): + if point[0] < rect[0]: + return False + elif point[1] < rect[1]: + return False + elif point[0] > rect[2]: + return False + elif point[1] > rect[3]: + return False + return True + + +def measure_triangle(image, points): + rect = (0, 0, image.shape[1], image.shape[0]) + sub_div = cv2.Subdiv2D(rect) + + for p in points: + sub_div.insert(p) + + triangle_list = sub_div.getTriangleList() + + triangle = [] + pt = [] + + for t in triangle_list: + pt.append((t[0], t[1])) + pt.append((t[2], t[3])) + pt.append((t[4], t[5])) + + pt1 = (t[0], t[1]) + pt2 = (t[2], t[3]) + pt3 = (t[4], t[5]) + + if rect_contains(rect, pt1) and rect_contains(rect, pt2) and rect_contains(rect, pt3): + ind = [] + for j in range(0, 3): + for k in range(0, len(points)): + if abs(pt[j][0] - points[k][0]) < 1.0 and abs(pt[j][1] - points[k][1]) < 1.0: + ind.append(k) + if len(ind) == 3: + triangle.append((ind[0], ind[1], ind[2])) + + pt = [] + + return triangle + + +def morph_triangle(src, dst, img, t_src, t_dst, t, alpha): + r1 = cv2.boundingRect(np.float32([t_src])) + r2 = cv2.boundingRect(np.float32([t_dst])) + r = cv2.boundingRect(np.float32([t])) + + t1_rect = [] + t2_rect = [] + t_rect = [] + + for i in range(0, 3): + t_rect.append(((t[i][0] - r[0]), (t[i][1] - r[1]))) + t1_rect.append(((t_src[i][0] - r1[0]), (t_src[i][1] - r1[1]))) + t2_rect.append(((t_dst[i][0] - r2[0]), (t_dst[i][1] - r2[1]))) + + mask = np.zeros((r[3], r[2], 3), dtype=np.float32) + cv2.fillConvexPoly(mask, np.int32(t_rect), (1.0, 1.0, 1.0), 16, 0) + + img1_rect = src[r1[1]:r1[1] + r1[3], r1[0]:r1[0] + r1[2]] + 
img2_rect = dst[r2[1]:r2[1] + r2[3], r2[0]:r2[0] + r2[2]] + + size = (r[2], r[3]) + + warp_img1 = affine_transform(img1_rect, t1_rect, t_rect, size) + warp_img2 = affine_transform(img2_rect, t2_rect, t_rect, size) + + img_rect = (1.0 - alpha) * warp_img1 + alpha * warp_img2 + + img[r[1]:r[1] + r[3], r[0]:r[0] + r[2]] = img[r[1]:r[1] + r[3], r[0]:r[0] + r[2]] * (1 - mask) + img_rect * mask + + +def affine_triangle(src, dst, t_src, t_dst): + r1 = cv2.boundingRect(np.float32([t_src])) + r2 = cv2.boundingRect(np.float32([t_dst])) + + t1_rect = [] + t2_rect = [] + t2_rect_int = [] + + for i in range(0, 3): + t1_rect.append((t_src[i][0] - r1[0], t_src[i][1] - r1[1])) + t2_rect.append((t_dst[i][0] - r2[0], t_dst[i][1] - r2[1])) + t2_rect_int.append((t_dst[i][0] - r2[0], t_dst[i][1] - r2[1])) + + mask = np.zeros((r2[3], r2[2], 3), dtype=np.float32) + cv2.fillConvexPoly(mask, np.int32(t2_rect_int), (1.0, 1.0, 1.0), 16, 0) + + img1_rect = src[r1[1]:r1[1] + r1[3], r1[0]:r1[0] + r1[2]] + + size = (r2[2], r2[3]) + + img2_rect = affine_transform(img1_rect, t1_rect, t2_rect, size) + img2_rect = img2_rect * mask + + dst[r2[1]:r2[1] + r2[3], r2[0]:r2[0] + r2[2]] = dst[r2[1]:r2[1] + r2[3], r2[0]:r2[0] + r2[2]] * ( + (1.0, 1.0, 1.0) - mask) + + dst[r2[1]:r2[1] + r2[3], r2[0]:r2[0] + r2[2]] = dst[r2[1]:r2[1] + r2[3], r2[0]:r2[0] + r2[2]] + img2_rect + + +def affine_transform(src, src_tri, dst_tri, size): + warp_mat = cv2.getAffineTransform(np.float32(src_tri), np.float32(dst_tri)) + + dst = cv2.warpAffine(src, warp_mat, (size[0], size[1]), + None, + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_REFLECT_101) + + return dst diff --git a/images/20171030175254.jpg b/images/20171030175254.jpg new file mode 100644 index 0000000..8d84c5c Binary files /dev/null and b/images/20171030175254.jpg differ diff --git a/images/20171030175254.jpg.txt b/images/20171030175254.jpg.txt new file mode 100644 index 0000000..336ae71 --- /dev/null +++ b/images/20171030175254.jpg.txt @@ -0,0 +1 @@ 
+{"image_id": "ZjjhzaTe8XKaL+uus3VNnQ==", "request_id": "1510036769,fff2ea42-31b5-4ff8-af90-a4cd4a270c31", "time_used": 197, "faces": [{"landmark": {"mouth_upper_lip_left_contour2": {"y": 283, "x": 187}, "mouth_upper_lip_top": {"y": 282, "x": 210}, "mouth_upper_lip_left_contour1": {"y": 280, "x": 201}, "left_eye_upper_left_quarter": {"y": 202, "x": 158}, "left_eyebrow_lower_middle": {"y": 184, "x": 162}, "mouth_upper_lip_left_contour3": {"y": 288, "x": 192}, "left_eyebrow_lower_left_quarter": {"y": 184, "x": 149}, "right_eyebrow_lower_left_quarter": {"y": 187, "x": 238}, "right_eye_pupil": {"y": 207, "x": 245}, "mouth_lower_lip_right_contour1": {"y": 288, "x": 225}, "mouth_lower_lip_left_contour2": {"y": 294, "x": 183}, "mouth_lower_lip_right_contour3": {"y": 300, "x": 223}, "mouth_lower_lip_right_contour2": {"y": 293, "x": 233}, "contour_chin": {"y": 339, "x": 209}, "contour_left9": {"y": 336, "x": 188}, "left_eye_lower_right_quarter": {"y": 211, "x": 176}, "mouth_lower_lip_top": {"y": 290, "x": 210}, "right_eyebrow_upper_middle": {"y": 176, "x": 250}, "right_eyebrow_left_corner": {"y": 187, "x": 225}, "right_eye_lower_right_quarter": {"y": 211, "x": 255}, "right_eye_bottom": {"y": 213, "x": 247}, "contour_left7": {"y": 314, "x": 155}, "contour_left6": {"y": 299, "x": 143}, "contour_left5": {"y": 283, "x": 134}, "contour_left4": {"y": 264, "x": 129}, "contour_left3": {"y": 246, "x": 125}, "contour_left2": {"y": 227, "x": 124}, "contour_left1": {"y": 209, "x": 123}, "left_eye_lower_left_quarter": {"y": 210, "x": 157}, "contour_right1": {"y": 209, "x": 284}, "contour_right3": {"y": 245, "x": 282}, "contour_right2": {"y": 227, "x": 284}, "mouth_left_corner": {"y": 286, "x": 173}, "contour_right4": {"y": 263, "x": 279}, "contour_right7": {"y": 311, "x": 256}, "left_eyebrow_left_corner": {"y": 185, "x": 137}, "nose_right": {"y": 257, "x": 231}, "nose_tip": {"y": 250, "x": 208}, "contour_right5": {"y": 280, "x": 274}, "nose_contour_lower_middle": {"y": 266, "x": 207}, 
"right_eye_top": {"y": 203, "x": 246}, "mouth_lower_lip_left_contour3": {"y": 301, "x": 195}, "right_eye_right_corner": {"y": 207, "x": 262}, "mouth_upper_lip_right_contour1": {"y": 279, "x": 218}, "mouth_upper_lip_right_contour2": {"y": 282, "x": 230}, "right_eyebrow_lower_right_quarter": {"y": 186, "x": 263}, "left_eye_left_corner": {"y": 206, "x": 150}, "mouth_right_corner": {"y": 284, "x": 241}, "right_eye_lower_left_quarter": {"y": 212, "x": 238}, "left_eyebrow_right_corner": {"y": 186, "x": 190}, "left_eyebrow_lower_right_quarter": {"y": 186, "x": 176}, "right_eye_center": {"y": 208, "x": 246}, "left_eye_pupil": {"y": 206, "x": 168}, "nose_left": {"y": 257, "x": 184}, "mouth_lower_lip_left_contour1": {"y": 289, "x": 192}, "left_eye_upper_right_quarter": {"y": 203, "x": 177}, "right_eyebrow_lower_middle": {"y": 185, "x": 251}, "left_eye_center": {"y": 207, "x": 167}, "contour_left8": {"y": 327, "x": 170}, "contour_right9": {"y": 335, "x": 228}, "right_eye_left_corner": {"y": 211, "x": 230}, "left_eyebrow_upper_left_quarter": {"y": 175, "x": 148}, "left_eye_bottom": {"y": 212, "x": 166}, "left_eye_right_corner": {"y": 210, "x": 184}, "right_eyebrow_upper_left_quarter": {"y": 178, "x": 236}, "contour_right8": {"y": 324, "x": 243}, "right_eyebrow_right_corner": {"y": 187, "x": 275}, "right_eye_upper_left_quarter": {"y": 205, "x": 237}, "left_eyebrow_upper_middle": {"y": 174, "x": 163}, "right_eyebrow_upper_right_quarter": {"y": 178, "x": 264}, "nose_contour_left1": {"y": 213, "x": 195}, "nose_contour_left2": {"y": 243, "x": 189}, "nose_contour_left3": {"y": 262, "x": 195}, "nose_contour_right1": {"y": 213, "x": 220}, "nose_contour_right2": {"y": 243, "x": 226}, "mouth_lower_lip_bottom": {"y": 303, "x": 210}, "contour_right6": {"y": 296, "x": 266}, "nose_contour_right3": {"y": 262, "x": 220}, "left_eye_top": {"y": 201, "x": 168}, "mouth_upper_lip_right_contour3": {"y": 287, "x": 225}, "left_eyebrow_upper_right_quarter": {"y": 177, "x": 177}, 
"right_eye_upper_right_quarter": {"y": 204, "x": 255}, "mouth_upper_lip_bottom": {"y": 289, "x": 210}}, "face_rectangle": {"width": 163, "top": 175, "left": 121, "height": 163}, "face_token": "d456a295d43fc8be0fe803d191312aa4"}]} \ No newline at end of file diff --git a/images/model.jpg b/images/model.jpg new file mode 100644 index 0000000..5d423f3 Binary files /dev/null and b/images/model.jpg differ diff --git a/images/model.jpg.txt b/images/model.jpg.txt new file mode 100644 index 0000000..cecfeeb --- /dev/null +++ b/images/model.jpg.txt @@ -0,0 +1 @@ +{"image_id": "DwN5dUspiYfBtU71Rk9YYA==", "request_id": "1510209822,edf6fd59-4a87-4dc7-9673-43000bb5b476", "time_used": 354, "faces": [{"landmark": {"mouth_upper_lip_left_contour2": {"y": 314, "x": 280}, "mouth_upper_lip_top": {"y": 315, "x": 305}, "mouth_upper_lip_left_contour1": {"y": 313, "x": 295}, "left_eye_upper_left_quarter": {"y": 189, "x": 226}, "left_eyebrow_lower_middle": {"y": 173, "x": 229}, "mouth_upper_lip_left_contour3": {"y": 319, "x": 285}, "left_eyebrow_lower_left_quarter": {"y": 173, "x": 209}, "right_eyebrow_lower_left_quarter": {"y": 173, "x": 344}, "right_eye_pupil": {"y": 192, "x": 359}, "mouth_lower_lip_right_contour1": {"y": 322, "x": 326}, "mouth_lower_lip_left_contour2": {"y": 327, "x": 275}, "mouth_lower_lip_right_contour3": {"y": 336, "x": 324}, "mouth_lower_lip_right_contour2": {"y": 325, "x": 335}, "contour_chin": {"y": 384, "x": 308}, "contour_left9": {"y": 378, "x": 278}, "left_eye_lower_right_quarter": {"y": 208, "x": 252}, "mouth_lower_lip_top": {"y": 327, "x": 306}, "right_eyebrow_upper_middle": {"y": 153, "x": 360}, "right_eyebrow_left_corner": {"y": 178, "x": 324}, "right_eye_lower_right_quarter": {"y": 198, "x": 375}, "right_eye_bottom": {"y": 203, "x": 363}, "contour_left7": {"y": 342, "x": 238}, "contour_left6": {"y": 322, "x": 221}, "contour_left5": {"y": 299, "x": 207}, "contour_left4": {"y": 275, "x": 197}, "contour_left3": {"y": 250, "x": 190}, "contour_left2": {"y": 
225, "x": 184}, "contour_left1": {"y": 199, "x": 181}, "left_eye_lower_left_quarter": {"y": 204, "x": 224}, "contour_right1": {"y": 191, "x": 423}, "contour_right3": {"y": 243, "x": 418}, "contour_right2": {"y": 217, "x": 422}, "mouth_left_corner": {"y": 314, "x": 264}, "contour_right4": {"y": 268, "x": 411}, "contour_right7": {"y": 336, "x": 374}, "left_eyebrow_left_corner": {"y": 175, "x": 191}, "nose_right": {"y": 274, "x": 334}, "nose_tip": {"y": 279, "x": 302}, "contour_right5": {"y": 292, "x": 402}, "nose_contour_lower_middle": {"y": 293, "x": 304}, "right_eye_top": {"y": 183, "x": 358}, "mouth_lower_lip_left_contour3": {"y": 337, "x": 288}, "right_eye_right_corner": {"y": 191, "x": 385}, "mouth_upper_lip_right_contour1": {"y": 312, "x": 314}, "mouth_upper_lip_right_contour2": {"y": 313, "x": 329}, "right_eyebrow_lower_right_quarter": {"y": 163, "x": 383}, "left_eye_left_corner": {"y": 198, "x": 214}, "mouth_right_corner": {"y": 312, "x": 344}, "right_eye_lower_left_quarter": {"y": 204, "x": 349}, "left_eyebrow_right_corner": {"y": 180, "x": 271}, "left_eyebrow_lower_right_quarter": {"y": 178, "x": 250}, "right_eye_center": {"y": 195, "x": 361}, "left_eye_pupil": {"y": 196, "x": 241}, "nose_left": {"y": 278, "x": 272}, "mouth_lower_lip_left_contour1": {"y": 323, "x": 285}, "left_eye_upper_right_quarter": {"y": 193, "x": 255}, "right_eyebrow_lower_middle": {"y": 167, "x": 364}, "left_eye_center": {"y": 200, "x": 239}, "contour_left8": {"y": 362, "x": 256}, "contour_right9": {"y": 375, "x": 336}, "right_eye_left_corner": {"y": 204, "x": 337}, "left_eyebrow_upper_left_quarter": {"y": 161, "x": 208}, "left_eye_bottom": {"y": 208, "x": 238}, "left_eye_right_corner": {"y": 206, "x": 264}, "right_eyebrow_upper_left_quarter": {"y": 162, "x": 340}, "contour_right8": {"y": 357, "x": 357}, "right_eyebrow_right_corner": {"y": 162, "x": 401}, "right_eye_upper_left_quarter": {"y": 190, "x": 345}, "left_eyebrow_upper_middle": {"y": 160, "x": 231}, 
"right_eyebrow_upper_right_quarter": {"y": 151, "x": 382}, "nose_contour_left1": {"y": 207, "x": 282}, "nose_contour_left2": {"y": 259, "x": 277}, "nose_contour_left3": {"y": 287, "x": 286}, "nose_contour_right1": {"y": 205, "x": 314}, "nose_contour_right2": {"y": 256, "x": 326}, "mouth_lower_lip_bottom": {"y": 341, "x": 306}, "contour_right6": {"y": 315, "x": 389}, "nose_contour_right3": {"y": 285, "x": 320}, "left_eye_top": {"y": 187, "x": 241}, "mouth_upper_lip_right_contour3": {"y": 318, "x": 324}, "left_eyebrow_upper_right_quarter": {"y": 165, "x": 253}, "right_eye_upper_right_quarter": {"y": 184, "x": 373}, "mouth_upper_lip_bottom": {"y": 322, "x": 305}}, "face_rectangle": {"width": 245, "top": 147, "left": 180, "height": 245}, "face_token": "415a93e6ce46c1c8ed5aaf963cb081f5"}]} \ No newline at end of file diff --git a/images/output.jpg b/images/output.jpg new file mode 100644 index 0000000..f3742f5 Binary files /dev/null and b/images/output.jpg differ diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..5ef8f22 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +numpy +opencv_python +requests \ No newline at end of file