<?xml version="1.0" encoding="utf-8"?>
<?xml-stylesheet type="text/xsl" href="style/detail_T.xsl"?>
<bibitem type="J">   <ARLID>0616904</ARLID> <utime>20250320140311.7</utime><mtime>20250213235959.9</mtime>   <SCOPUS>85216108770</SCOPUS> <WOS>001420733900001</WOS>  <DOI>10.1016/j.dsp.2025.105022</DOI>           <title language="eng" primary="1">Implicit neural representation for image demosaicking</title>  <specification> <page_count>14 s.</page_count> <media_type>P</media_type> </specification>   <serial><ARLID>cav_un_epca*0252719</ARLID><ISSN>1051-2004</ISSN><title>Digital Signal Processing</title><part_num/><part_title/><volume_id>159</volume_id><volume/><publisher><place/><name>Elsevier</name><year/></publisher></serial>    <keyword>Demosaicking</keyword>   <keyword>Implicit neural representation</keyword>   <keyword>Inverse problems</keyword>    <author primary="1"> <ARLID>cav_un_auth*0379363</ARLID> <name1>Kerepecký</name1> <name2>Tomáš</name2> <institution>UTIA-B</institution> <full_dept language="cz">Zpracování obrazové informace</full_dept> <full_dept language="eng">Department of Image Processing</full_dept> <department language="cz">ZOI</department> <department language="eng">ZOI</department> <full_dept>Department of Image Processing</full_dept> <country>CZ</country> <garant>K</garant> <fullinstit>Ústav teorie informace a automatizace AV ČR, v. v. i.</fullinstit> </author> <author primary="0"> <ARLID>cav_un_auth*0101209</ARLID> <name1>Šroubek</name1> <name2>Filip</name2> <institution>UTIA-B</institution> <full_dept language="cz">Zpracování obrazové informace</full_dept> <full_dept>Department of Image Processing</full_dept> <department language="cz">ZOI</department> <department>ZOI</department> <full_dept>Department of Image Processing</full_dept> <fullinstit>Ústav teorie informace a automatizace AV ČR, v. v. 
i.</fullinstit> </author> <author primary="0"> <ARLID>cav_un_auth*0101087</ARLID> <name1>Flusser</name1> <name2>Jan</name2> <institution>UTIA-B</institution> <full_dept language="cz">Zpracování obrazové informace</full_dept> <full_dept>Department of Image Processing</full_dept> <department language="cz">ZOI</department> <department>ZOI</department> <full_dept>Department of Image Processing</full_dept> <fullinstit>Ústav teorie informace a automatizace AV ČR, v. v. i.</fullinstit> </author>   <source> <url>https://library.utia.cas.cz/separaty/2025/ZOI/kerepecky-0616904.pdf</url> </source> <source> <url>https://www.sciencedirect.com/science/article/pii/S1051200425000442?via%3Dihub</url>  </source>        <cas_special> <project> <project_id>GA25-15933S</project_id> <agency>GA ČR</agency> <ARLID>cav_un_auth*0483946</ARLID> </project>  <abstract language="eng" primary="1">We propose a novel approach to enhance image demosaicking algorithms using implicit neural representations (INR). Our method employs a multi-layer perceptron to encode RGB images, combining original Bayer measurements with an initial estimate from existing demosaicking methods to achieve superior reconstructions. A key innovation is the integration of two loss functions: a Bayer loss for fidelity to sensor data and a complementary loss that regularizes reconstruction using interpolated data from the initial estimate. This combination, along with INR’s inherent ability to capture fine details, enables high-fidelity reconstructions that incorporate information from both sources. Furthermore, we demonstrate that INR can effectively correct artifacts in state-of-the-art demosaicking methods when input data diverge from the training distribution, such as in cases of noise or blur. 
This adaptability highlights the transformative potential of INR-based demosaicking, offering a robust solution to this challenging problem.</abstract>       <reportyear>2026</reportyear>  <RIV>IN</RIV>    <result_subspec>WOS</result_subspec> <FORD0>10000</FORD0> <FORD1>10200</FORD1> <FORD2>10201</FORD2>   <num_of_auth>3</num_of_auth>  <inst_support> RVO:67985556 </inst_support>  <permalink>https://hdl.handle.net/11104/0364269</permalink>  <cooperation> <ARLID>cav_un_auth*0329918</ARLID> <name>FJFI ČVUT Praha</name> <country>CZ</country> </cooperation>  <confidential>S</confidential>  <article_num> 105022 </article_num> <unknown tag="mrcbC91"> C </unknown>         <unknown tag="mrcbT16-e">ENGINEERING.ELECTRICAL&amp;ELECTRONIC</unknown> <unknown tag="mrcbT16-f">2.9</unknown> <unknown tag="mrcbT16-g">0.6</unknown> <unknown tag="mrcbT16-h">4.3</unknown> <unknown tag="mrcbT16-i">0.00715</unknown> <unknown tag="mrcbT16-j">0.589</unknown> <unknown tag="mrcbT16-k">7998</unknown> <unknown tag="mrcbT16-q">96</unknown> <unknown tag="mrcbT16-s">0.704</unknown> <unknown tag="mrcbT16-y">45.4</unknown> <unknown tag="mrcbT16-x">3.83</unknown> <unknown tag="mrcbT16-3">4297</unknown> <unknown tag="mrcbT16-4">Q2</unknown> <unknown tag="mrcbT16-5">2.600</unknown> <unknown tag="mrcbT16-6">492</unknown> <unknown tag="mrcbT16-7">Q2</unknown> <unknown tag="mrcbT16-C">57.2</unknown> <unknown tag="mrcbT16-M">0.67</unknown> <unknown tag="mrcbT16-N">Q2</unknown> <unknown tag="mrcbT16-P">57.2</unknown> <arlyear>2025</arlyear>       <unknown tag="mrcbU14"> 85216108770 SCOPUS </unknown> <unknown tag="mrcbU24"> PUBMED </unknown> <unknown tag="mrcbU34"> 001420733900001 WOS </unknown> <unknown tag="mrcbU63"> cav_un_epca*0252719 Digital Signal Processing 159 1 2025 1051-2004 1095-4333 Elsevier </unknown> </cas_special> </bibitem>