bc.bib

@incollection{M09c,
  author = {Markovsky, I.},
  title = {Algorithms and literate programs for weighted low-rank approximation with missing data},
  booktitle = {Approximation Algorithms for Complex Systems},
  editor = {Iske, A. and others},
  publisher = {Springer},
  year = {2011},
  volume = {3},
  series = {Springer Proceedings in Mathematics},
  chapter = {12},
  pages = {255--273},
  pdf = {http://eprints.soton.ac.uk/268296/2/missing-data-2x1.pdf},
  software = {http://eprints.soton.ac.uk/268296/7/missing-data.tar},
  doi = {10.1007/978-3-642-16876-5_12},
  optaddendum = {(Citations: 4 in WoS, 9 in GS)}
}
@incollection{dist-chapter,
  author = {Markovsky, I.},
  title = {Rank constrained optimization problems in computer vision},
  booktitle = {Regularization, Optimization, Kernels, and Support Vector Machines},
  editor = {Suykens, J. and Signoretto, M. and Argyriou, A.},
  publisher = {Chapman \& Hall/CRC},
  series = {Machine Learning \& Pattern Recognition},
  year = {2014},
  chapter = {13},
  pages = {293--312},
  pdf = {http://homepages.vub.ac.be/~imarkovs/publications/dist-chapter.pdf},
  isbn = {9781482241396}
}
@incollection{kpca,
  author = {Markovsky, I. and Usevich, K.},
  title = {Nonlinearly structured low-rank approximation},
  booktitle = {Low-Rank and Sparse Modeling for Visual Analysis},
  editor = {Fu, Yun Raymond},
  publisher = {Springer},
  year = {2014},
  pages = {1--22},
  pdf = {http://homepages.vub.ac.be/~imarkovs/publications/kpca.pdf},
  doi = {10.1007/978-3-319-12000-3_1},
  abstract = {Polynomially structured low-rank approximation problems occur in algebraic curve fitting, \eg, conic section fitting, subspace clustering (generalized principal component analysis), and nonlinear and parameter-varying system identification. The maximum likelihood estimation principle applied to these nonlinear models leads to nonconvex optimization problems and yields inconsistent estimators in the errors-in-variables (measurement errors) setting.
We propose a computationally cheap and statistically consistent estimator based on a bias correction procedure, called adjusted least-squares estimation. The method is successfully used for conic section fitting and was recently generalized to algebraic curve fitting. The contribution of this book's chapter is the application of the polynomially structured low-rank approximation problem and, in particular, the adjusted least-squares method to subspace clustering, nonlinear and parameter-varying system identification. The classical in system identification input-output notion of a dynamical model is replaced by the behavioral definition of a model as a set, represented by implicit nonlinear difference equations.},
  keywords = {structured low-rank approximation, conic section fitting, subspace clustering, nonlinear system identification.}
}
@incollection{ident-prague,
  author = {Markovsky, I.},
  title = {System identification in the behavioral setting: A structured low-rank approximation approach},
  booktitle = {Latent Variable Analysis and Signal Separation},
  editor = {Vincent, E. and Yeredor, A. and Koldovsk\'y, Z. and Tichavsk\'y, P.},
  publisher = {Springer},
  volume = {9237},
  series = {Lecture Notes in Computer Science},
  isbn = {978-3-319-22481-7},
  year = {2015},
  pages = {235--242},
  pdf = {http://homepages.vub.ac.be/~imarkovs/publications/ident-prague-2x1.pdf},
  abstract = {System identification is a fast growing research area that encompasses a broad range of problems and solution methods. It is desirable to have a unifying setting and a few common principles that are sufficient to understand the currently existing identification methods. The behavioral approach to system and control, put forward in the mid 80's, is such a unifying setting. Till recently, however, the behavioral approach lacked supporting numerical solution methods. In the last 10 years, the structured low-rank approximation setting was used to fill this gap. In this paper, we summarize recent progress on methods for system identification in the behavioral setting and pose some open problems. First, we show that errors-in-variables and output error system identification problems are equivalent to Hankel structured low-rank approximation. Then, we outline three generic solution approaches: 1) methods based on local optimization, 2) methods based on convex relaxations, and 3) subspace methods. A specific example of a subspace identification method---data-driven impulse response computation---is presented in full details. In order to achieve the desired unification, the classical ARMAX identification problem should also be formulated as a structured low-rank approximation problem. This is an outstanding open problem.},
  keywords = {system identification, errors-in-variables modeling, behavioral approach, Hankel matrix, low-rank approximation, impulse response estimation, ARMAX identification}
}
@incollection{KanIshEtAl16,
  author = {Kannan, R. and Ishteva, M. and Drake, B. and Park, H.},
  title = {Bounded Matrix Low Rank Approximation},
  booktitle = {Non-negative Matrix Factorization Techniques},
  editor = {Naik, G. R.},
  series = {Signals and Communication Technology},
  publisher = {Springer Berlin Heidelberg},
  year = {2016},
  isbn = {978-3-662-48330-5},
  doi = {10.1007/978-3-662-48331-2_4},
  pages = {89--118}
}
@incollection{Ish15,
  author = {Ishteva, M.},
  title = {Tensors and Latent Variable Models},
  booktitle = {Latent Variable Analysis and Signal Separation},
  editor = {Vincent, E. and Yeredor, A. and Koldovsk\'y, Z. and Tichavsk\'y, P.},
  volume = {9237},
  series = {Lecture Notes in Computer Science},
  publisher = {Springer International Publishing},
  year = {2015},
  isbn = {978-3-319-22481-7},
  doi = {10.1007/978-3-319-22482-4_6},
  keywords = {Latent variable models; Tensor; Low rank},
  pages = {49--55}
}
@incollection{DreGooEtAl15,
  author = {Dreesen, P. and Goossens, T. and Ishteva, M. and De Lathauwer, L. and Schoukens, J.},
  title = {Block-Decoupling Multivariate Polynomials Using the Tensor Block-Term Decomposition},
  booktitle = {Latent Variable Analysis and Signal Separation},
  editor = {Vincent, E. and Yeredor, A. and Koldovsk\'y, Z. and Tichavsk\'y, P.},
  volume = {9237},
  series = {Lecture Notes in Computer Science},
  publisher = {Springer International Publishing},
  year = {2015},
  isbn = {978-3-319-22481-7},
  doi = {10.1007/978-3-319-22482-4_2},
  keywords = {Multivariate polynomials; Multilinear algebra; Tensor decomposition; Block-term decomposition; Waring decomposition},
  pages = {14--21}
}

This file was generated by bibtex2html 1.97.