-
[pdf]
[arXiv]
[bibtex]
@inproceedings{Chahine_2024_CVPR,
  author    = {Chahine, Nicolas and Conde, Marcos V. and Carfora, Daniela and
               Pacianotto, Gabriel and Pochon, Benoit and Ferradans, Sira and
               Timofte, Radu and Duan, Zhichao and Xu, Xinrui and Huang, Yipo and
               Yuan, Quan and Sheng, Xiangfei and Yang, Zhichao and Li, Leida and
               Fan, Haotian and Kong, Fangyuan and Xu, Yifang and Sun, Wei and
               Zhang, Weixia and Jiang, Yanwei and Wu, Haoning and Zhang, Zicheng and
               Jia, Jun and Zhou, Yingjie and Ji, Zhongpeng and Min, Xiongkuo and
               Lin, Weisi and Zhai, Guangtao and Wang, Xiaoqi and Liu, Junqi and
               Guo, Zixi and Zhang, Yun and Chen, Zewen and Wang, Wen and
               Wang, Juan and Li, Bing},
  title     = {Deep Portrait Quality Assessment. A {NTIRE} 2024 Challenge Survey},
  booktitle = {Proceedings of the {IEEE/CVF} Conference on Computer Vision and Pattern Recognition ({CVPR}) Workshops},
  month     = jun,
  year      = {2024},
  pages     = {6732--6744},
}
Deep Portrait Quality Assessment. A NTIRE 2024 Challenge Survey
Abstract
This paper reviews the NTIRE 2024 Portrait Quality Assessment Challenge, highlighting the proposed solutions and results. This challenge aims to obtain an efficient deep neural network capable of estimating the perceptual quality of real portrait photos. The methods must generalize to diverse scenes and diverse lighting conditions (indoor, outdoor, low-light), movement blur, and other challenging conditions. In the challenge, 140 participants registered and 35 submitted results during the challenge period. The performance of the top 5 submissions is reviewed and provided here as a gauge for the current state-of-the-art in Portrait Quality Assessment.
Related Material