<?xml version="1.0" encoding="utf-8"?>
<?xml-stylesheet type="text/xsl" href="style/detail_T.xsl"?>
<bibitem type="J">
  <ARLID>0641338</ARLID>
  <utime>20260224163925.4</utime>
  <mtime>20251112235959.9</mtime>
  <SCOPUS>105021299046</SCOPUS>
  <WOS>001618124500012</WOS>
  <DOI>10.1098/rsos.250513</DOI>
  <title language="eng" primary="1">Material fingerprinting: predicting human perception of material appearance through psychophysical analysis and neural networks</title>
  <specification>
    <page_count>21 pp.</page_count>
    <media_type>E</media_type>
  </specification>
  <serial>
    <ARLID>cav_un_epca*0475408</ARLID>
    <ISSN>2054-5703</ISSN>
    <title>Royal Society Open Science</title>
    <part_num>11</part_num>
    <part_title/>
    <volume_id>12</volume_id>
    <volume/>
    <publisher>
      <place/>
      <name>Royal Society Publishing</name>
      <year/>
    </publisher>
  </serial>
  <keyword>material</keyword>
  <keyword>perception</keyword>
  <keyword>feature</keyword>
  <keyword>predicting</keyword>
  <keyword>neural network</keyword>
  <keyword>fingerprint</keyword>
  <author primary="1">
    <ARLID>cav_un_auth*0101086</ARLID>
    <name1>Filip</name1>
    <name2>Jiří</name2>
    <institution>UTIA-B</institution>
    <full_dept language="cz">Rozpoznávání obrazu</full_dept>
    <full_dept language="eng">Department of Pattern Recognition</full_dept>
    <department language="cz">RO</department>
    <department language="eng">RO</department>
    <share>40</share>
    <garant>K</garant>
    <fullinstit>Ústav teorie informace a automatizace AV ČR, v. v. i.</fullinstit>
  </author>
  <author primary="0">
    <ARLID>cav_un_auth*0294719</ARLID>
    <name1>Děchtěrenko</name1>
    <name2>Filip</name2>
    <institution>PSU-E</institution>
    <full_dept language="cz">Kognitivní psychologie</full_dept>
    <full_dept>Department of Cognitive Psychology</full_dept>
    <full_dept>Institute of Psychology</full_dept>
    <country>CZ</country>
    <share>10</share>
    <fullinstit>Psychologický ústav AV ČR, v. v. i.</fullinstit>
  </author>
  <author primary="0">
    <ARLID>cav_un_auth*0340873</ARLID>
    <name1>Schmidt</name1>
    <name2>F.</name2>
    <country>DE</country>
    <share>10</share>
  </author>
  <author primary="0">
    <ARLID>cav_un_auth*0106408</ARLID>
    <name1>Lukavský</name1>
    <name2>Jiří</name2>
    <institution>PSU-E</institution>
    <full_dept language="cz">Kognitivní psychologie</full_dept>
    <full_dept>Department of Cognitive Psychology</full_dept>
    <full_dept>Institute of Psychology</full_dept>
    <country>CZ</country>
    <share>10</share>
    <fullinstit>Psychologický ústav AV ČR, v. v. i.</fullinstit>
  </author>
  <author primary="0">
    <ARLID>cav_un_auth*0293863</ARLID>
    <name1>Kotera</name1>
    <name2>Jan</name2>
    <institution>UTIA-B</institution>
    <full_dept language="cz">Zpracování obrazové informace</full_dept>
    <full_dept>Department of Image Processing</full_dept>
    <department language="cz">ZOI</department>
    <department>ZOI</department>
    <country>CZ</country>
    <share>10</share>
    <fullinstit>Ústav teorie informace a automatizace AV ČR, v. v. i.</fullinstit>
  </author>
  <author primary="0">
    <ARLID>cav_un_auth*0458638</ARLID>
    <name1>Vilímovská</name1>
    <name2>Veronika</name2>
    <institution>UTIA-B</institution>
    <full_dept language="cz">Rozpoznávání obrazu</full_dept>
    <full_dept>Department of Pattern Recognition</full_dept>
    <department language="cz">RO</department>
    <department>RO</department>
    <country>CZ</country>
    <share>10</share>
    <fullinstit>Ústav teorie informace a automatizace AV ČR, v. v. i.</fullinstit>
  </author>
  <author primary="0">
    <ARLID>cav_un_auth*0468180</ARLID>
    <name1>Fleming</name1>
    <name2>R. W.</name2>
    <country>DE</country>
    <share>10</share>
  </author>
  <source>
    <url>https://library.utia.cas.cz/separaty/2025/RO/filip-0641338.pdf</url>
  </source>
  <source>
    <url>https://royalsocietypublishing.org/rsos/article/12/11/250513/234224/Material-fingerprinting-predicting-human</url>
  </source>
  <cas_special>
    <project>
      <project_id>GA22-17529S</project_id>
      <agency>GA ČR</agency>
      <country>CZ</country>
      <ARLID>cav_un_auth*0439849</ARLID>
    </project>
    <abstract language="eng" primary="1">Digital representation of materials is crucial in fields such as virtual reality, industrial design and quality control. However, predicting human perception of materials from image data is challenging due to the complexity of material appearances and the intricacies of human vision. This study introduces a perceptual representation termed the ‘visual fingerprint’, linking image-based measurements of materials to intuitive, human-understandable attributes. We conducted psychophysical studies using standardized video sequences of 347 diverse real-world materials, including fabrics and wood, selected to encompass a broad spectrum of textures, colours and reflective properties. Sixteen key appearance attributes were identified, and over 110 000 human ratings were collected to map perceptual attributes across material categories. By integrating CLIP-derived image features with a multi-layer perceptron model, we developed a predictive framework for material perception. Our results demonstrate that human judgements of appearance and similarity can be accurately predicted using only two images of a material. This work offers a practical and interpretable approach to material representation, enabling intuitive comparisons and retrievals in applications where material appearance is crucial.
The proposed material fingerprint and its prediction directly from image data represent a significant step towards simplifying the understanding and interoperability of material properties in diverse digital environments.</abstract>
    <result_subspec>WOS</result_subspec>
    <RIV>BD</RIV>
    <FORD0>10000</FORD0>
    <FORD1>10200</FORD1>
    <FORD2>10201</FORD2>
    <reportyear>2026</reportyear>
    <num_of_auth>7</num_of_auth>
    <unknown tag="mrcbC47"> PSU-E 50000 50100 50101 </unknown>
    <unknown tag="mrcbC55"> PSU-E AN </unknown>
    <inst_support> RVO:67985556 </inst_support>
    <inst_support> RVO:68081740 </inst_support>
    <permalink>https://hdl.handle.net/11104/0371959</permalink>
    <cooperation>
      <ARLID>cav_un_auth*0354022</ARLID>
      <name>Justus Liebig University Giessen</name>
      <country>DE</country>
    </cooperation>
    <confidential>S</confidential>
    <article_num>250513</article_num>
    <unknown tag="mrcbC91"> A </unknown>
    <unknown tag="mrcbT16-e">MULTIDISCIPLINARY SCIENCES</unknown>
    <unknown tag="mrcbT16-f">3.2</unknown>
    <unknown tag="mrcbT16-g">0.6</unknown>
    <unknown tag="mrcbT16-h">5.3</unknown>
    <unknown tag="mrcbT16-i">0.0227</unknown>
    <unknown tag="mrcbT16-j">0.922</unknown>
    <unknown tag="mrcbT16-k">17898</unknown>
    <unknown tag="mrcbT16-q">92</unknown>
    <unknown tag="mrcbT16-s">0.795</unknown>
    <unknown tag="mrcbT16-y">67.64</unknown>
    <unknown tag="mrcbT16-x">2.99</unknown>
    <unknown tag="mrcbT16-3">5847</unknown>
    <unknown tag="mrcbT16-4">Q1</unknown>
    <unknown tag="mrcbT16-5">2.800</unknown>
    <unknown tag="mrcbT16-6">642</unknown>
    <unknown tag="mrcbT16-7">Q2</unknown>
    <unknown tag="mrcbT16-C">73.2</unknown>
    <unknown tag="mrcbT16-M">0.9</unknown>
    <unknown tag="mrcbT16-N">Q1</unknown>
    <unknown tag="mrcbT16-P">73.2</unknown>
    <arlyear>2025</arlyear>
    <unknown tag="mrcbU14"> 105021299046 SCOPUS </unknown>
    <unknown tag="mrcbU24"> 41244060 PUBMED </unknown>
    <unknown tag="mrcbU34"> 001618124500012 WOS </unknown>
    <unknown tag="mrcbU63"> cav_un_epca*0475408 Royal Society Open Science 12 11 2025 2054-5703 2054-5703 Royal Society Publishing </unknown>
  </cas_special>
</bibitem>
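<!--
  Illustrative sketch, kept outside the record as a comment. The abstract above
  describes predicting sixteen perceptual appearance attributes from CLIP-derived
  image features of two material images via a multi-layer perceptron. The Python
  below is a minimal approximation of that pipeline, not the authors' published
  model: the CLIP checkpoint name, hidden width, fusion by concatenation, and the
  image file paths are all assumptions for illustration.

  import torch
  import torch.nn as nn
  from PIL import Image
  from transformers import CLIPModel, CLIPProcessor

  class FingerprintMLP(nn.Module):
      """Maps two CLIP image embeddings to 16 appearance attribute ratings."""
      def __init__(self, feat_dim: int = 512, n_attributes: int = 16):
          super().__init__()
          self.net = nn.Sequential(
              nn.Linear(2 * feat_dim, 256),  # fuse the two views (assumed: concatenation)
              nn.ReLU(),
              nn.Linear(256, n_attributes),  # one output per perceptual attribute
          )

      def forward(self, f1: torch.Tensor, f2: torch.Tensor) -> torch.Tensor:
          return self.net(torch.cat([f1, f2], dim=-1))

  # Assumed backbone; the record does not name the exact CLIP variant used.
  clip = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
  proc = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")

  # Hypothetical file paths: two images of the same material.
  img_view1 = Image.open("material_view1.jpg")
  img_view2 = Image.open("material_view2.jpg")

  inputs = proc(images=[img_view1, img_view2], return_tensors="pt")
  with torch.no_grad():
      feats = clip.get_image_features(**inputs)  # shape (2, 512)

  model = FingerprintMLP()                       # untrained here; the paper fits it to ratings
  fingerprint = model(feats[0:1], feats[1:2])    # shape (1, 16): predicted attribute ratings
-->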