Commit e9049638 authored by MattiaPujatti's avatar MattiaPujatti
Browse files

first commit

parent 78f5585a
data
.ipynb_checkpoints/
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
@phdthesis{Virtue:EECS-2019-126,
  author   = {Virtue, Patrick},
  title    = {Complex-valued Deep Learning with Applications to Magnetic Resonance Image Synthesis},
  school   = {EECS Department, University of California, Berkeley},
  year     = {2019},
  month    = aug,
  number   = {UCB/EECS-2019-126},
  url      = {http://www2.eecs.berkeley.edu/Pubs/TechRpts/2019/EECS-2019-126.html},
  abstract = {Magnetic resonance imaging (MRI) has the ability to produce a series of images that each have different visual contrast between tissues, allowing clinicians to qualitatively assess pathologies that may be visible in one contrast-weighted image but not others. Unfortunately, these standard contrast-weighted images do not contain quantitative values, producing challenges for post-processing, assessment, and longitudinal studies. MR fingerprinting is a recent technique that produces quantitative tissue maps from a single pseudorandom acquisition, but it relies on computationally heavy nearest neighbor algorithms to solve the associated nonlinear inverse problem. In this dissertation, we present our deep learning methods to speed up quantitative MR fingerprinting and synthesize the standard contrast-weighted images directly from the same MR fingerprinting scan.
Adapting deep learning methodologies to MR image synthesis presents two specific challenges: 1) complex-valued data and 2) the presence of noise while undersampling.
MRI signals are inherently complex-valued, as they are measurements of rotating magnetization within the body. However, modern neural networks are not designed to support complex values. As an example, the pervasive ReLU activation function is undefined for complex numbers. This limitation curtails the impact of deep learning for complex data applications, such as MRI, radio frequency modulation identification, and target recognition in synthetic-aperture radar images. In this dissertation, we discuss the motivation for complex-valued networks, the changes that we have made to implement complex backpropagation, and our new complex cardioid activation function that made it possible to outperform real-valued networks for MR fingerprinting image synthesis.
In Fourier-based medical imaging, undersampling results in an underdetermined system, in which a linear reconstruction will exhibit artifacts. Another consequence is lower
signal-to-noise ratio (SNR) because of fewer acquired measurements. The coupled effects of low SNR and underdetermined system during reconstruction makes it difficult to model the signal and analyze image reconstruction algorithms. We demonstrate that neural networks trained only with a Gaussian noise model fail to process in vivo MR fingerprinting data, while our proposed empirical noise model allows neural networks to successfully synthesize quantitative images. Additionally, to better understand the impact of noise on undersampled imaging systems, we present an image quality prediction process that reconstructs fully sampled, fully determined data with noise added to simulate the SNR loss induced by a given undersampling pattern. The resulting prediction image empirically shows the effects of noise in undersampled image reconstruction without any effect from an underdetermined system, allowing MR pulse sequence and reconstruction developers to determine if low SNR, rather than the underdetermined system, is the limiting factor for a successful reconstruction.},
}
@misc{kreutzdelgado2009complex,
  author        = {Kreutz-Delgado, Ken},
  title         = {The Complex Gradient Operator and the {CR}-Calculus},
  year          = {2009},
  eprint        = {0906.4835},
  archiveprefix = {arXiv},
  primaryclass  = {math.OC},
  url           = {https://arxiv.org/abs/0906.4835},
}
@misc{ziller2021complexvalued,
  author        = {Ziller, Alexander and Usynin, Dmitrii and Knolle, Moritz and Hammernik, Kerstin and Rueckert, Daniel and Kaissis, Georgios},
  title         = {Complex-valued deep learning with differential privacy},
  year          = {2021},
  eprint        = {2110.03478},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CR},
  url           = {https://arxiv.org/abs/2110.03478},
}
@article{ragab2020adversarial,
  author  = {Ragab, Mohamed and Chen, Zhenghua and Wu, Min and Li, Haoliang and Kwoh, Chee-Keong and Yan, Ruqiang and Li, Xiaoli},
  title   = {Adversarial Multiple-Target Domain Adaptation for Fault Classification},
  journal = {IEEE Transactions on Instrumentation and Measurement},
  year    = {2020},
  month   = jul,
  volume  = {PP},
  pages   = {1--1},
  doi     = {10.1109/TIM.2020.3009341},
}
@article{4682548,
  author  = {Ollila, Esa},
  title   = {On the Circularity of a Complex Random Variable},
  journal = {IEEE Signal Processing Letters},
  year    = {2008},
  volume  = {15},
  pages   = {841--844},
  doi     = {10.1109/LSP.2008.2005050},
}
@misc{virtue2017better,
  author        = {Virtue, Patrick and Yu, Stella X. and Lustig, Michael},
  title         = {Better than Real: Complex-valued Neural Nets for {MRI} Fingerprinting},
  year          = {2017},
  eprint        = {1707.00070},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CV},
  url           = {https://arxiv.org/abs/1707.00070},
}
@article{Dramsch_seismic,
  author        = {Dramsch, Jesper Sören and Lüthje, Mikael and Christensen, Anders Nymark},
  title         = {Complex-valued neural networks for machine learning on non-stationary physical data},
  journal       = {Computers \& Geosciences},
  year          = {2021},
  month         = jan,
  volume        = {146},
  pages         = {104643},
  issn          = {0098-3004},
  publisher     = {Elsevier BV},
  doi           = {10.1016/j.cageo.2020.104643},
  eprint        = {1905.12321},
  archiveprefix = {arXiv},
}
@misc{guberman2016complex,
  author        = {Guberman, Nitzan},
  title         = {On Complex Valued Convolutional Neural Networks},
  year          = {2016},
  eprint        = {1602.09046},
  archiveprefix = {arXiv},
  primaryclass  = {cs.NE},
  url           = {https://arxiv.org/abs/1602.09046},
}
@article{GARDNER2021116245,
  author   = {Gardner, P. and Bull, L. A. and Dervilis, N. and Worden, K.},
  title    = {Overcoming the problem of repair in structural health monitoring: Metric-informed transfer learning},
  journal  = {Journal of Sound and Vibration},
  year     = {2021},
  volume   = {510},
  pages    = {116245},
  issn     = {0022-460X},
  doi      = {10.1016/j.jsv.2021.116245},
  url      = {https://www.sciencedirect.com/science/article/pii/S0022460X21003175},
  keywords = {Transfer learning, Domain adaptation, Population-based structural health monitoring},
  abstract = {Structural repairs alter the physical properties of a structure, changing its responses, both in terms of its normal condition and of its different damage states. This difference in responses manifests itself as a shift between the pre- and post-repair data distributions, which can be problematic for conventional data-driven approaches to structural health monitoring (SHM), and limits their effectiveness in industrial applications. This limitation occurs typically because approaches assume that the data distribution is the same in training as appears in testing; with an algorithm failing to generalise when this assumption is not true; that is, pre-repair labels no longer apply to the post-repair data. Transfer learning, in the form of domain adaptation, proposes a solution to this issue, by mapping the pre- and post-repair data distributions onto a shared latent space where their distributions are approximately equal, allowing pre-repair label knowledge to be used to classify the post-repair data. This paper demonstrates the applicability of domain adaptation as a method for overcoming the problem of repair on a dataset from a Gnat trainer aircraft. In addition, a novel modification to an existing domain adaptation technique – joint distribution adaptation – is proposed, which seeks to improve the semi-supervised learning phase of the algorithm by considering a metric-informed procedure. The metric-informed joint distribution adaptation algorithm is benchmarked against, and shown to outperform, both conventional data-based approaches and other domain adaptation techniques.},
}
@misc{ajakan2015domainadversarial,
  author        = {Ajakan, Hana and Germain, Pascal and Larochelle, Hugo and Laviolette, François and Marchand, Mario},
  title         = {Domain-Adversarial Neural Networks},
  year          = {2015},
  eprint        = {1412.4446},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ML},
  url           = {https://arxiv.org/abs/1412.4446},
}
@article{ozdagli2020domain,
  author  = {Ozdagli, Ali and Koutsoukos, Xenofon},
  title   = {Domain Adaptation for Structural Health Monitoring},
  journal = {Annual Conference of the PHM Society},
  year    = {2020},
  month   = nov,
  volume  = {12},
  pages   = {9},
  doi     = {10.36001/phmconf.2020.v12i1.1184},
}
@misc{trabelsi2018deep,
  author        = {Trabelsi, Chiheb and Bilaniuk, Olexa and Zhang, Ying and Serdyuk, Dmitriy and Subramanian, Sandeep and Santos, João Felipe and Mehri, Soroush and Rostamzadeh, Negar and Bengio, Yoshua and Pal, Christopher J.},
  title         = {Deep Complex Networks},
  year          = {2018},
  eprint        = {1705.09792},
  archiveprefix = {arXiv},
  primaryclass  = {cs.NE},
  url           = {https://arxiv.org/abs/1705.09792},
}
@article{lee2021gravity,
  author  = {Lee, SeonWoo and Yu, Hyeontak and Yang, HoJun and Song, InSeo and Choi, JungMu and Yang, JaeHeung and Lim, GangMin and Kim, Kyu-Sung and Choi, ByeongKeun and Kwon, JangWoo},
  title   = {A Study on Deep Learning Application of Vibration Data and Visualization of Defects for Predictive Maintenance of Gravity Acceleration Equipment},
  journal = {Applied Sciences},
  year    = {2021},
  month   = feb,
  volume  = {11},
  pages   = {1564},
  doi     = {10.3390/app11041564},
}
@misc{ganin2016domainadversarial,
  author        = {Ganin, Yaroslav and Ustinova, Evgeniya and Ajakan, Hana and Germain, Pascal and Larochelle, Hugo and Laviolette, François and Marchand, Mario and Lempitsky, Victor},
  title         = {Domain-Adversarial Training of Neural Networks},
  year          = {2016},
  eprint        = {1505.07818},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ML},
  url           = {https://arxiv.org/abs/1505.07818},
}
@article{kim2002fully,
  author  = {Kim, Taehwan and Adali, Tülay},
  title   = {Fully Complex Multi-Layer Perceptron Network for Nonlinear Signal Processing},
  journal = {Journal of VLSI Signal Processing},
  year    = {2002},
  month   = aug,
  volume  = {32},
  pages   = {29--43},
  doi     = {10.1023/A:1016359216961},
}
@misc{reichert2014neuronal,
  author        = {Reichert, David P. and Serre, Thomas},
  title         = {Neuronal Synchrony in Complex-Valued Deep Networks},
  year          = {2014},
  eprint        = {1312.6115},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ML},
  url           = {https://arxiv.org/abs/1312.6115},
}
@misc{scardapane2018complexvalued,
  author        = {Scardapane, Simone and Van Vaerenbergh, Steven and Hussain, Amir and Uncini, Aurelio},
  title         = {Complex-valued Neural Networks with Non-parametric Activation Functions},
  year          = {2018},
  eprint        = {1802.08026},
  archiveprefix = {arXiv},
  primaryclass  = {cs.NE},
  url           = {https://arxiv.org/abs/1802.08026},
}
@article{hirose2012phase,
  author  = {Hirose, Akira and Yoshida, Shotaro},
  title   = {Relationship between phase and amplitude generalization errors in complex- and real-valued feedforward neural networks},
  journal = {Neural Computing and Applications},
  year    = {2012},
  month   = jun,
  volume  = {22},
  doi     = {10.1007/s00521-012-0960-z},
}
@techreport{Messerschmitt_stationary_points,
  author      = {Messerschmitt, David G.},
  title       = {Stationary points of a real-valued function of a complex variable},
  institution = {EECS Department, University of California, Berkeley},
  year        = {2006},
  month       = jun,
  number      = {UCB/EECS-2006-93},
  url         = {http://www2.eecs.berkeley.edu/Pubs/TechRpts/2006/EECS-2006-93.html},
  abstract    = {The optimization problem of maximizing or minimizing some real-valued objective function of a complex variable (or vector of complex variables) arises often in signal processing. For example, the mean-square error is such a function. A challenge that arises is that such a function is often not analytic, and thus not differentiable using the ordinary tools of complex variable theory. This tutorial report shows how this challenge can be bypassed by reformulating the problem as a function of two real variables (the real and imaginary parts), finding the solution, and then relating this back to complex variables.},
}
@misc{barrachina2021complexvalued,
  author        = {Barrachina, Jose Agustin and Ren, Chenfang and Morisseau, Christele and Vieillard, Gilles and Ovarlez, Jean-Philippe},
  title         = {Complex-Valued vs. Real-Valued Neural Networks for Classification Perspectives: An Example on Non-Circular Data},
  year          = {2021},
  eprint        = {2009.08340},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ML},
  url           = {https://arxiv.org/abs/2009.08340v2},
}
@misc{shen2018wasserstein,
  author        = {Shen, Jian and Qu, Yanru and Zhang, Weinan and Yu, Yong},
  title         = {Wasserstein Distance Guided Representation Learning for Domain Adaptation},
  year          = {2018},
  eprint        = {1707.01217},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ML},
  url           = {https://arxiv.org/abs/1707.01217},
}
@software{schlomer_nico_2021_5636188,
  author    = {Schlömer, Nico},
  title     = {cplot: Plot complex functions},
  month     = nov,
  year      = {2021},
  note      = {If you use this software, please cite it as below.},
  publisher = {Zenodo},
  version   = {v0.8.1},
  doi       = {10.5281/zenodo.5636188},
  url       = {https://doi.org/10.5281/zenodo.5636188},
}
@book{stein_complex_analysis,
  author    = {Stein, Elias M. and Shakarchi, Rami},
  title     = {Complex Analysis},
  publisher = {Princeton University Press},
  series    = {Princeton Lectures in Analysis},
  volume    = {2},
  year      = {2003},
  isbn      = {9780691113852},
}
@article{Farris_visual_complex_analysis,
  author  = {Farris, Frank A. and Needham, Tristan},
  title   = {Visual Complex Analysis},
  journal = {American Mathematical Monthly},
  year    = {1998},
  month   = jun,
  volume  = {105},
  number  = {6},
  pages   = {570},
  doi     = {10.2307/2589427},
}
@article{Nitta_complexBP,
  author  = {Nitta, Tohru},
  title   = {An Extension of the Back-Propagation Algorithm to Complex Numbers},
  journal = {Neural Networks},
  year    = {1997},
  month   = nov,
  volume  = {10},
  number  = {8},
  pages   = {1391--1415},
  doi     = {10.1016/s0893-6080(97)00036-1},
}
@book{hirose_cvnn,
  editor    = {Hirose, Akira},
  title     = {Complex-Valued Neural Networks: Advances and Applications},
  publisher = {Wiley-IEEE Press},
  year      = {2013},
  isbn      = {9781118344606},
}
@inproceedings{amin_wirtinger,
  author    = {Amin, Faijul and Amin, Muhammad and Al Nuaimi, Ahmed Yarub Hani and Murase, Kazuyuki},
  title     = {Wirtinger Calculus Based Gradient Descent and Levenberg-Marquardt Learning Algorithms in Complex-Valued Neural Networks},
  booktitle = {Neural Information Processing},
  series    = {Lecture Notes in Computer Science},
  volume    = {7062},
  pages     = {550--559},
  year      = {2011},
  month     = nov,
  doi       = {10.1007/978-3-642-24955-6_66},
}
@article{Hualiang_nonlinear,
  author  = {Li, Hualiang and Adali, Tülay},
  title   = {Complex-Valued Adaptive Signal Processing Using Nonlinear Functions},
  journal = {EURASIP Journal on Advances in Signal Processing},
  year    = {2008},
  month   = dec,
  volume  = {2008},
  doi     = {10.1155/2008/765615},
}
@misc{cogswell2016reducing,
  author        = {Cogswell, Michael and Ahmed, Faruk and Girshick, Ross and Zitnick, Larry and Batra, Dhruv},
  title         = {Reducing Overfitting in Deep Networks by Decorrelating Representations},
  year          = {2016},
  eprint        = {1511.06068},
  archiveprefix = {arXiv},
  primaryclass  = {cs.LG},
}
@article{xavier_init,
  author  = {Glorot, Xavier and Bengio, Yoshua},
  title   = {Understanding the difficulty of training deep feedforward neural networks},
  journal = {Journal of Machine Learning Research - Proceedings Track},
  year    = {2010},
  month   = jan,
  volume  = {9},
  pages   = {249--256},
}
@misc{he2015delving,
  author        = {He, Kaiming and Zhang, Xiangyu and Ren, Shaoqing and Sun, Jian},
  title         = {Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification},
  year          = {2015},
  eprint        = {1502.01852},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CV},
}
\relax
\providecommand\hyper@newdestlabel[2]{}
\providecommand\babel@aux[2]{}
\@nameuse{bbl@beforestart}
\providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
\HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
\global\let\oldcontentsline\contentsline
\gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
\global\let\oldnewlabel\newlabel
\gdef\newlabel#1#2{\newlabelxx{#1}#2}
\gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
\AtEndDocument{\ifx\hyper@anchor\@undefined
\let\contentsline\oldcontentsline
\let\newlabel\oldnewlabel
\fi}
\fi}
\global\let\hyper@last\relax
\gdef\HyperFirstAtBeginDocument#1{#1}
\providecommand\HyField@AuxAddToFields[1]{}
\providecommand\HyField@AuxAddToCoFields[2]{}
\babel@aux{italian}{}
\babel@aux{italian}{}
\gdef \@abspage@last{2}
\documentclass[../main.tex]{subfiles}
\begin{document}
\frontmatter
\begin{titlepage}
\vspace{5mm}
\begin{figure}[hbtp]
\centering
\includegraphics[scale=.13]{../pictures/frontespizio/UNIPD}
\end{figure}
\vspace{5mm}
\begin{center}
{{\huge{\textsc{\bf UNIVERSIT\`A DEGLI STUDI DI PADOVA}}}\\}
\vspace{5mm}
{\Large{\bf Dipartimento di Fisica e Astronomia ``Galileo Galilei''}} \\
\vspace{5mm}
{\Large{\bf DSIP Research Unit - FBK}} \\
\vspace{5mm}
{\Large{\textsc{\bf Master Degree in Physics of Data}}}\\
\vspace{20mm}
{\Large{\textsc{\bf Final Dissertation}}}\\
\vspace{30mm}
\begin{spacing}{3}
{\large\textbf{Complex-Valued Deep Learning for Condition Monitoring}}\\
\end{spacing}
\vspace{8mm}
\end{center}
\vspace{20mm}
\begin{spacing}{2}
\begin{tabular}{ l c c c c cc c c c c l }
{\Large{\bf Thesis supervisor}} &&&&&&&&&&& {\Large{\bf Candidate}}\\
{\Large{\bf Dr. Marco Cristoforetti}} &&&&&&&&&&& {\Large{\bf Mattia Pujatti}}\\
{\Large{\bf Thesis co-supervisor}}\\
{\Large{\bf Prof. Samir Suweis}}\\
\end{tabular}
\end{spacing}
\vspace{15 mm}
\begin{center}
{\Large{\bf Academic Year 2020/2021}}
\end{center}
\end{titlepage}
\clearpage{\pagestyle{empty}\cleardoublepage}
\end{document}
\relax
\providecommand\hyper@newdestlabel[2]{}
\providecommand\babel@aux[2]{}
\@nameuse{bbl@beforestart}
\providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
\HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
\global\let\oldcontentsline\contentsline
\gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
\global\let\oldnewlabel\newlabel
\gdef\newlabel#1#2{\newlabelxx{#1}#2}
\gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
\AtEndDocument{\ifx\hyper@anchor\@undefined
\let\contentsline\oldcontentsline
\let\newlabel\oldnewlabel
\fi}
\fi}
\global\let\hyper@last\relax
\gdef\HyperFirstAtBeginDocument#1{#1}
\providecommand\HyField@AuxAddToFields[1]{}
\providecommand\HyField@AuxAddToCoFields[2]{}
\babel@aux{italian}{}
\babel@aux{italian}{}
\gdef \@abspage@last{1}
This diff is collapsed.
\documentclass[../main.tex]{subfiles}
\begin{document}
\chapter*{Abstract}
At present, the vast majority of deep learning architectures are based on real-valued operations and representations. However, recent works and fundamental theoretical analyses suggest that complex numbers can have a richer expressiveness: many deterministic wave signals, such as seismic, electrical, or vibrational ones, carry information in their phase, which risks being lost when they are studied with a real-valued model. Despite these attractive properties and this potential, complex-valued algorithms have only recently started to be introduced into deep neural network frameworks.
In this work, we move forward in this direction by implementing … % TODO(review): sentence left unfinished in the draft — complete before submission
As a first application of this solution, we show the results obtained by applying complex-valued deep neural networks to condition monitoring in industrial settings. Several deep network architectures have been trained to detect failures on vibrational signals extracted from sensors attached to gearmotors. Finally, we compare the performance obtained with real- and complex-valued neural networks.
\end{document}
\ No newline at end of file
\relax
\providecommand\hyper@newdestlabel[2]{}
\providecommand\babel@aux[2]{}
\@nameuse{bbl@beforestart}
\providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
\HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
\global\let\oldcontentsline\contentsline
\gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
\global\let\oldnewlabel\newlabel
\gdef\newlabel#1#2{\newlabelxx{#1}#2}
\gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
\AtEndDocument{\ifx\hyper@anchor\@undefined
\let\contentsline\oldcontentsline
\let\newlabel\oldnewlabel
\fi}
\fi}
\global\let\hyper@last\relax
\gdef\HyperFirstAtBeginDocument#1{#1}
\providecommand\HyField@AuxAddToFields[1]{}
\providecommand\HyField@AuxAddToCoFields[2]{}
\citation{trabelsi2018deep}
\citation{xavier_init}
\citation{he2015delving}
\citation{xavier_init}
\citation{he2015delving}
\babel@aux{english}{}
\@writefile{toc}{\contentsline {chapter}{\numberline {1}Mathematical Proofs}{1}{chapter.1}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\newlabel{app:cmplx_optim}{{1}{1}{Mathematical Proofs}{chapter.1}{}}
\@writefile{toc}{\contentsline {section}{\numberline {1.1}Complex Weights Initialization \cite {trabelsi2018deep}}{1}{section.1.1}\protected@file@percent }
\newlabel{app:weight_init}{{1.1}{1}{Complex Weights Initialization \cite {trabelsi2018deep}}{section.1.1}{}}
\citation{Messerschmitt_stationary_points}
\citation{MESSERSCHMITT_STATIONARY_POINTS}
\@writefile{toc}{\contentsline {section}{\numberline {1.2}Stationary points of a real-valued function of a complex variable \cite {Messerschmitt_stationary_points}}{2}{section.1.2}\protected@file@percent }
\citation{Hualiang_nonlinear}
\citation{HUALIANG_NONLINEAR}
\@writefile{toc}{\contentsline {section}{\numberline {1.3}Steepest complex gradient descent \cite {Hualiang_nonlinear}}{3}{section.1.3}\protected@file@percent }
\gdef \@abspage@last{3}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment