<?xml version="1.0" encoding="utf-8"?>
<?xml-stylesheet type="text/xsl" href="style/detail_T.xsl"?>
<bibitem type="C">   <ARLID>0534541</ARLID> <utime>20240103224732.5</utime><mtime>20201117235959.9</mtime>   <SCOPUS>85093082913</SCOPUS>  <DOI>10.1007/978-3-030-58526-6_31</DOI>           <title language="eng" primary="1">Stable Low-Rank Tensor Decomposition for Compression of Convolutional Neural Network</title>  <specification> <page_count>18 s.</page_count> <media_type>P</media_type> </specification>   <serial><ARLID>cav_un_epca*0534540</ARLID><ISBN>978-3-030-58525-9</ISBN><ISSN>0302-9743</ISSN><title>ECCV 2020</title><part_num/><part_title>LNCS</part_title><page_num>522-539</page_num><publisher><place>Cham</place><name>Springer Nature Switzerland AG 2020</name><year>2020</year></publisher><editor><name1>Vedaldi</name1><name2>Andrea</name2></editor><editor><name1>Bischof</name1><name2>Horst</name2></editor><editor><name1>Brox</name1><name2>Thomas</name2></editor><editor><name1>Frahm</name1><name2>Jan-Michael</name2></editor></serial>    <keyword>Convolutional neural network acceleration</keyword>   <keyword>Low-rank tensor decomposition</keyword>   <keyword>Degeneracy correction</keyword>    <author primary="1"> <ARLID>cav_un_auth*0382249</ARLID> <name1>Phan</name1> <name2>A. 
H.</name2> <country>RU</country> </author> <author primary="0"> <ARLID>cav_un_auth*0399439</ARLID> <name1>Sobolev</name1> <name2>K.</name2> <country>RU</country> </author> <author primary="0"> <ARLID>cav_un_auth*0399440</ARLID> <name1>Sozykin</name1> <name2>K.</name2> <country>RU</country> </author> <author primary="0"> <ARLID>cav_un_auth*0399441</ARLID> <name1>Ermilov</name1> <name2>D.</name2> <country>RU</country> </author> <author primary="0"> <ARLID>cav_un_auth*0399442</ARLID> <name1>Gusak</name1> <name2>J.</name2> <country>RU</country> </author> <author primary="0"> <ARLID>cav_un_auth*0101212</ARLID> <name1>Tichavský</name1> <name2>Petr</name2> <institution>UTIA-B</institution> <full_dept language="cz">Stochastická informatika</full_dept> <full_dept>Department of Stochastic Informatics</full_dept> <department language="cz">SI</department> <department>SI</department> <full_dept>Department of Stochastic Informatics</full_dept> <share>10</share> <fullinstit>Ústav teorie informace a automatizace AV ČR, v. v. i.</fullinstit> </author> <author primary="0"> <ARLID>cav_un_auth*0399443</ARLID> <name1>Glukhov</name1> <name2>V.</name2> <country>CN</country> </author> <author primary="0"> <ARLID>cav_un_auth*0399444</ARLID> <name1>Oseledets</name1> <name2>I.</name2> <country>RU</country> </author> <author primary="0"> <ARLID>cav_un_auth*0382250</ARLID> <name1>Cichocki</name1> <name2>A.</name2> <country>RU</country> </author>   <source> <url>http://library.utia.cas.cz/separaty/2020/SI/tichavsky-0534541.pdf</url> </source>        <cas_special>  <abstract language="eng" primary="1">Most state-of-the-art deep neural networks are overparameterized and exhibit a high computational cost. A straightforward approach to this problem is to replace convolutional kernels with its low-rank tensor approximations, whereas the Canonical Polyadic tensor Decomposition is one of the most suited models. 
However, fitting the convolutional tensors by numerical optimization algorithms often encounters diverging components, i.e., extremely large rank-one tensors but canceling each other. Such degeneracy often causes the non-interpretable result and numerical instability for the neural network fine-tuning. This paper is the first study on degeneracy in the tensor decomposition of convolutional kernels. We present a novel method, which can stabilize the low-rank approximation of convolutional kernels and ensure efficient compression while preserving the high quality performance of the neural networks. We evaluate our approach on popular CNN architectures for image classification and show that our method results in much lower accuracy degradation and provides consistent performance. </abstract>    <action target="WRD"> <ARLID>cav_un_auth*0399445</ARLID> <name>European Conference on Computer Vision 2020 /16./</name> <dates>20200823</dates> <unknown tag="mrcbC20-s">20200828</unknown> <place>Glasgow</place> <country>GB</country>  </action>  <RIV>JD</RIV> <FORD0>20000</FORD0> <FORD1>20200</FORD1> <FORD2>20201</FORD2>    <reportyear>2021</reportyear>      <num_of_auth>9</num_of_auth>  <presentation_type> PR </presentation_type> <inst_support> RVO:67985556 </inst_support>  <permalink>http://hdl.handle.net/11104/0313191</permalink>   <confidential>S</confidential>         <arlyear>2020</arlyear>       <unknown tag="mrcbU14"> 85093082913 SCOPUS </unknown> <unknown tag="mrcbU24"> PUBMED </unknown> <unknown tag="mrcbU34"> WOS </unknown> <unknown tag="mrcbU63"> cav_un_epca*0534540 ECCV 2020 978-3-030-58525-9 0302-9743 1611-3349 522 539 Cham Springer Nature Switzerland AG 2020 2020 Lecture Notes in Computer Science 12374 LNCS </unknown> <unknown tag="mrcbU67"> 340 Vedaldi Andrea </unknown> <unknown tag="mrcbU67"> 340 Bischof Horst </unknown> <unknown tag="mrcbU67"> 340 Brox Thomas </unknown> <unknown tag="mrcbU67"> 340 Frahm Jan-Michael </unknown> </cas_special> </bibitem>