diff --git a/categories/index.html b/categories/index.html index b48c5abd..b87a1443 100644 --- a/categories/index.html +++ b/categories/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/categories/index.xml b/categories/index.xml index a83d9cc0..ca41c1a1 100644 --- a/categories/index.xml +++ b/categories/index.xml @@ -1 +1 @@ -Categories on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/categories/Recent content in Categories on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Categories on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/categories/Recent content in Categories on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/causal_inference/additive_noise_model/index.html b/code/causal_inference/additive_noise_model/index.html index be50e9c1..1d5c39cc 100644 --- a/code/causal_inference/additive_noise_model/index.html +++ b/code/causal_inference/additive_noise_model/index.html @@ -2,4 +2,4 @@

Additive Noise Model with Dependence Gradients

\ No newline at end of file +

Additive Noise Model with Dependence Gradients

\ No newline at end of file diff --git a/code/causal_inference/additive_noise_model/index.xml b/code/causal_inference/additive_noise_model/index.xml index bdefde2a..f3d7e40b 100644 --- a/code/causal_inference/additive_noise_model/index.xml +++ b/code/causal_inference/additive_noise_model/index.xml @@ -1 +1 @@ -Additive Noise Model with Dependence Gradients on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/causal_inference/additive_noise_model/Recent content in Additive Noise Model with Dependence Gradients on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Additive Noise Model with Dependence Gradients on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/causal_inference/additive_noise_model/Recent content in Additive Noise Model with Dependence Gradients on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/causal_inference/causeme_web/index.html b/code/causal_inference/causeme_web/index.html index 7ad2b067..1d25df52 100644 --- a/code/causal_inference/causeme_web/index.html +++ b/code/causal_inference/causeme_web/index.html @@ -2,4 +2,4 @@

CauseMe: A Web Platform for Causal Models Comparison

\ No newline at end of file +

CauseMe: A Web Platform for Causal Models Comparison

\ No newline at end of file diff --git a/code/causal_inference/causeme_web/index.xml b/code/causal_inference/causeme_web/index.xml index b4e032b8..01697220 100644 --- a/code/causal_inference/causeme_web/index.xml +++ b/code/causal_inference/causeme_web/index.xml @@ -1 +1 @@ -CauseMe: A Web Platform for Causal Models Comparison on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/causal_inference/causeme_web/Recent content in CauseMe: A Web Platform for Causal Models Comparison on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +CauseMe: A Web Platform for Causal Models Comparison on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/causal_inference/causeme_web/Recent content in CauseMe: A Web Platform for Causal Models Comparison on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/causal_inference/cross_kernel_granger_causality/index.html b/code/causal_inference/cross_kernel_granger_causality/index.html index bede189d..73deaf15 100644 --- a/code/causal_inference/cross_kernel_granger_causality/index.html +++ b/code/causal_inference/cross_kernel_granger_causality/index.html @@ -2,4 +2,4 @@

Cross-Kernel Granger Causality

\ No newline at end of file +

Cross-Kernel Granger Causality

\ No newline at end of file diff --git a/code/causal_inference/cross_kernel_granger_causality/index.xml b/code/causal_inference/cross_kernel_granger_causality/index.xml index f07b71a8..5e5547dc 100644 --- a/code/causal_inference/cross_kernel_granger_causality/index.xml +++ b/code/causal_inference/cross_kernel_granger_causality/index.xml @@ -1 +1 @@ -Cross-Kernel Granger Causality on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/causal_inference/cross_kernel_granger_causality/Recent content in Cross-Kernel Granger Causality on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Cross-Kernel Granger Causality on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/causal_inference/cross_kernel_granger_causality/Recent content in Cross-Kernel Granger Causality on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/causal_inference/fair_kernel_learning/index.html b/code/causal_inference/fair_kernel_learning/index.html index 2caf23c4..04c3556a 100644 --- a/code/causal_inference/fair_kernel_learning/index.html +++ b/code/causal_inference/fair_kernel_learning/index.html @@ -2,4 +2,4 @@

Fair Kernel Learning

\ No newline at end of file +

Fair Kernel Learning

\ No newline at end of file diff --git a/code/causal_inference/fair_kernel_learning/index.xml b/code/causal_inference/fair_kernel_learning/index.xml index fc363baa..037d5647 100644 --- a/code/causal_inference/fair_kernel_learning/index.xml +++ b/code/causal_inference/fair_kernel_learning/index.xml @@ -1 +1 @@ -Fair Kernel Learning on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/causal_inference/fair_kernel_learning/Recent content in Fair Kernel Learning on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Fair Kernel Learning on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/causal_inference/fair_kernel_learning/Recent content in Fair Kernel Learning on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/causal_inference/index.html b/code/causal_inference/index.html index 2263955a..6857536b 100644 --- a/code/causal_inference/index.html +++ b/code/causal_inference/index.html @@ -2,4 +2,4 @@

Causal inference

Additive Noise Model with Dependence Gradients

Additive Noise Model with Dependence Gradients Image

Kernel dependence measures yield accurate estimates of nonlinear relations between random variables, and they are also endorsed with solid theoretical properties and convergence rates. However, they are hampered by the high computational cost involved, and the interpretability of the measure, which remains hidden behind the implicit feature map. Sensitivity Maps for the Hilbert-Schmidt independence criterion (HSIC) provide a way to explicitly analyze and visualize the relative relevance of both examples and features on the dependence measure.

References
  • Sensitivity maps of the Hilbert–Schmidt independence criterion. Adrián Pérez-Suay and Gustau Camps-Valls. Applied Soft Computing 2017.
  • Causal Inference in Geoscience and Remote Sensing from Observational Data. P’erez-Suay, A. and Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing 57 (3): 1502-1513, 2019.

CauseMe: A Web Platform for Causal Models Comparison

CauseMe Platform Image

Detecting causal associations in time series datasets is key for novel insights into complex dynamical systems like the Earth system or the human brain. The CauseMe platform provides benchmark datasets with ground truth, featuring different real data challenges to assess and compare causal discovery methods. The available datasets vary in dimensionality, complexity, and sophistication, allowing researchers to evaluate their methods in a consistent manner.

References
  • Inferring causation from time series with perspectives in Earth system sciences. Runge, J. et al. Nature Communications 2553:1-13, 2019.
  • Causal discovery in Earth system science: State-of-the-art and a new Causality Challenge platform. Runge, J. and Munoz-Marí, J. and Camps-Valls, G. AGU Fall Meeting, Washington, USA 2018.

Cross-Kernel Granger Causality

Cross-Kernel Granger Causality Image

Granger causality (GC) is a widely used approach for detecting causal relations, but it struggles with nonlinearity and nonstationarity. Cross-kernel Granger causality generalizes the method using kernel functions to capture nonlinear cross-relations between variables, addressing the limitations of traditional GC in handling nonstationary processes.

References
  • D. Bueso, M. Piles, and G. Camps-Valls. Cross-Information Kernel Causality: Revisiting global teleconnections of ENSO over soil moisture and vegetation. Proceedings of the 9th International Workshop on Climate Informatics: CI 2019.
  • Revisiting impacts of MJO on soil moisture: a causality perspective. Diego Bueso, Maria Piles, Gustau Camps-Valls. AGU Fall Meeting, San Francisco, USA 2019.

Fair Kernel Learning

Fair Kernel Learning Image

New social and economic activities massively exploit big data and machine learning algorithms to do inference on people’s lives. Applications include automatic curricula evaluation, wage determination, and risk assessment for credits and loans. Recently, many governments and institutions have raised concerns about the lack of fairness, equity and ethics in machine learning to treat these problems. It has been shown that not including sensitive features that bias fairness, such as gender or race, is not enough to mitigate the discrimination when other related features are included. Instead, including fairness in the objective function has been shown to be more efficient.

References
  • Fair Kernel Learning. Adrián Pérez-Suay, Valero Laparra, Gonzalo Mateo-García, Jordi Muñoz-Marí, Luis Gómez-Chova and Gustau Camps-Valls. ECML PKDD 2017 (Accepted).

\ No newline at end of file +

Causal inference

Additive Noise Model with Dependence Gradients

Additive Noise Model with Dependence Gradients Image

Kernel dependence measures yield accurate estimates of nonlinear relations between random variables, and they are also endorsed with solid theoretical properties and convergence rates. However, they are hampered by the high computational cost involved, and the interpretability of the measure, which remains hidden behind the implicit feature map. Sensitivity Maps for the Hilbert-Schmidt independence criterion (HSIC) provide a way to explicitly analyze and visualize the relative relevance of both examples and features on the dependence measure.

References
  • Sensitivity maps of the Hilbert–Schmidt independence criterion. Adrián Pérez-Suay and Gustau Camps-Valls. Applied Soft Computing 2017.
  • Causal Inference in Geoscience and Remote Sensing from Observational Data. Pérez-Suay, A. and Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing 57 (3): 1502-1513, 2019.

CauseMe: A Web Platform for Causal Models Comparison

CauseMe Platform Image

Detecting causal associations in time series datasets is key for novel insights into complex dynamical systems like the Earth system or the human brain. The CauseMe platform provides benchmark datasets with ground truth, featuring different real data challenges to assess and compare causal discovery methods. The available datasets vary in dimensionality, complexity, and sophistication, allowing researchers to evaluate their methods in a consistent manner.

References
  • Inferring causation from time series with perspectives in Earth system sciences. Runge, J. et al. Nature Communications 2553:1-13, 2019.
  • Causal discovery in Earth system science: State-of-the-art and a new Causality Challenge platform. Runge, J. and Munoz-Marí, J. and Camps-Valls, G. AGU Fall Meeting, Washington, USA 2018.

Cross-Kernel Granger Causality

Cross-Kernel Granger Causality Image

Granger causality (GC) is a widely used approach for detecting causal relations, but it struggles with nonlinearity and nonstationarity. Cross-kernel Granger causality generalizes the method using kernel functions to capture nonlinear cross-relations between variables, addressing the limitations of traditional GC in handling nonstationary processes.

References
  • D. Bueso, M. Piles, and G. Camps-Valls. Cross-Information Kernel Causality: Revisiting global teleconnections of ENSO over soil moisture and vegetation. Proceedings of the 9th International Workshop on Climate Informatics: CI 2019.
  • Revisiting impacts of MJO on soil moisture: a causality perspective. Diego Bueso, Maria Piles, Gustau Camps-Valls. AGU Fall Meeting, San Francisco, USA 2019.

Fair Kernel Learning

Fair Kernel Learning Image

New social and economic activities massively exploit big data and machine learning algorithms to do inference on people’s lives. Applications include automatic curricula evaluation, wage determination, and risk assessment for credits and loans. Recently, many governments and institutions have raised concerns about the lack of fairness, equity and ethics in machine learning to treat these problems. It has been shown that not including sensitive features that bias fairness, such as gender or race, is not enough to mitigate the discrimination when other related features are included. Instead, including fairness in the objective function has been shown to be more efficient.

References
  • Fair Kernel Learning. Adrián Pérez-Suay, Valero Laparra, Gonzalo Mateo-García, Jordi Muñoz-Marí, Luis Gómez-Chova and Gustau Camps-Valls. ECML PKDD 2017 (Accepted).

\ No newline at end of file diff --git a/code/causal_inference/index.xml b/code/causal_inference/index.xml index 8deb0028..7046d105 100644 --- a/code/causal_inference/index.xml +++ b/code/causal_inference/index.xml @@ -1 +1 @@ -Causal inference on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/causal_inference/Recent content in Causal inference on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Causal inference on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/causal_inference/Recent content in Causal inference on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/altb/index.html b/code/classification_change_anomaly_detect/altb/index.html index 3678a089..9bc3a45a 100644 --- a/code/classification_change_anomaly_detect/altb/index.html +++ b/code/classification_change_anomaly_detect/altb/index.html @@ -2,4 +2,4 @@

ALTB: Active Learning MATLAB(tm) Toolbox

\ No newline at end of file +

ALTB: Active Learning MATLAB(tm) Toolbox

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/altb/index.xml b/code/classification_change_anomaly_detect/altb/index.xml index 65f7711d..d833e9b9 100644 --- a/code/classification_change_anomaly_detect/altb/index.xml +++ b/code/classification_change_anomaly_detect/altb/index.xml @@ -1 +1 @@ -ALTB: Active Learning MATLAB(tm) Toolbox on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/altb/Recent content in ALTB: Active Learning MATLAB(tm) Toolbox on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +ALTB: Active Learning MATLAB(tm) Toolbox on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/altb/Recent content in ALTB: Active Learning MATLAB(tm) Toolbox on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/bagsvm/index.html b/code/classification_change_anomaly_detect/bagsvm/index.html index 583526e3..3590389a 100644 --- a/code/classification_change_anomaly_detect/bagsvm/index.html +++ b/code/classification_change_anomaly_detect/bagsvm/index.html @@ -2,4 +2,4 @@

BagSVM: Bag Support Vector Machine

\ No newline at end of file +

BagSVM: Bag Support Vector Machine

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/bagsvm/index.xml b/code/classification_change_anomaly_detect/bagsvm/index.xml index 5b7de9bd..ea1da66b 100644 --- a/code/classification_change_anomaly_detect/bagsvm/index.xml +++ b/code/classification_change_anomaly_detect/bagsvm/index.xml @@ -1 +1 @@ -BagSVM: Bag Support Vector Machine on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/bagsvm/Recent content in BagSVM: Bag Support Vector Machine on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +BagSVM: Bag Support Vector Machine on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/bagsvm/Recent content in BagSVM: Bag Support Vector Machine on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/graph_kernel/index.html b/code/classification_change_anomaly_detect/graph_kernel/index.html index 4148fe7f..a9b38c28 100644 --- a/code/classification_change_anomaly_detect/graph_kernel/index.html +++ b/code/classification_change_anomaly_detect/graph_kernel/index.html @@ -2,4 +2,4 @@

Graph Kernels for Spatio-Spectral Classification

\ No newline at end of file +

Graph Kernels for Spatio-Spectral Classification

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/graph_kernel/index.xml b/code/classification_change_anomaly_detect/graph_kernel/index.xml index 62244bb4..274e2537 100644 --- a/code/classification_change_anomaly_detect/graph_kernel/index.xml +++ b/code/classification_change_anomaly_detect/graph_kernel/index.xml @@ -1 +1 @@ -Graph Kernels for Spatio-Spectral Classification on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/graph_kernel/Recent content in Graph Kernels for Spatio-Spectral Classification on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Graph Kernels for Spatio-Spectral Classification on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/graph_kernel/Recent content in Graph Kernels for Spatio-Spectral Classification on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/hyperlabelme/index.html b/code/classification_change_anomaly_detect/hyperlabelme/index.html index 8fabeb7d..10b47fd8 100644 --- a/code/classification_change_anomaly_detect/hyperlabelme/index.html +++ b/code/classification_change_anomaly_detect/hyperlabelme/index.html @@ -2,4 +2,4 @@

HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers

\ No newline at end of file +

HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/hyperlabelme/index.xml b/code/classification_change_anomaly_detect/hyperlabelme/index.xml index b41338d7..9fff99c9 100644 --- a/code/classification_change_anomaly_detect/hyperlabelme/index.xml +++ b/code/classification_change_anomaly_detect/hyperlabelme/index.xml @@ -1 +1 @@ -HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/hyperlabelme/Recent content in HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/hyperlabelme/Recent content in HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/index.html b/code/classification_change_anomaly_detect/index.html index ec3f5994..ae7fdcf6 100644 --- a/code/classification_change_anomaly_detect/index.html +++ b/code/classification_change_anomaly_detect/index.html @@ -2,4 +2,4 @@

Classification, change and anomaly detection

ALTB: Active Learning MATLAB(tm) Toolbox

Active Learning Toolbox Image

ALTB is a set of tools implementing state-of-the-art active learning algorithms for remote sensing applications.

References
  • Semisupervised classification of remote sensing images with active queries. Munoz-Mari, J., Tuia, D., and Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing 50(10): 3751-3763, 2012.
  • Remote sensing image segmentation by active queries. Tuia, D., Muñoz-Marí, J., Camps-Valls, G. Pattern Recognition 45(6): 2180-2192, 2012.

BagSVM: Bag Support Vector Machine

Bag Support Vector Machine Image

A semi-supervised SVM method for the classification of remote sensing images, learning a kernel directly from the image and regularizing the representation with cluster kernels.

References
  • Semisupervised remote sensing image classification with cluster kernels. Tuia, D., Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters 6(2): 224-228, 2009.
  • Spectral clustering with the probabilistic cluster kernel. Emma Izquierdo-Verdiguier, Robert Jenssen, Luis Gómez-Chova, Gustavo Camps-Valls. Neurocomputing 149(C): 1299-1304, 2015.

Graph Kernels for Spatio-Spectral Classification

Graph Kernels for Spatio-Spectral Classification Image

A graph kernel for spatio-spectral remote sensing image classification using support vector machines (SVM), incorporating higher-order relations in the neighborhood for improved classification accuracy.

References
  • Spatio-spectral remote sensing image classification with graph kernels. Camps-Valls, G., Shervashidze, N., and Borgwardt, K.M. IEEE Geoscience and Remote Sensing Letters 7(4): 741-745, 2010.

HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers

HyperLabelMe Platform Image

The Image and Signal Processing (ISP) group at the Universitat de València has harmonized a big database of labeled multi- and hyperspectral images for testing classification algorithms. We have harmonized 43 image datasets, both multi- and hyperspectral, for objective evaluation of algorithms and submitted papers. Researchers can train their algorithms off-line, and evaluate their accuracy on independent spectra test sets. The system returns accuracy and robustness measures, as well as a ranked list of the best methods.

References
  • J. Munoz-Mari et al., ‘HyperLabelMe : A Web Platform for Benchmarking Remote-Sensing Image Classifiers,’ in IEEE Geoscience and Remote Sensing Magazine, vol. 5, no. 4, pp. 79-85, Dec. 2017. doi: 10.1109/MGRS.2017.2762476.

Kernelized EC-ACD: Elliptically Contoured Anomaly Change Detection

Kernelized EC-ACD Image

A simple Toolbox for Anomaly Change Detection (ACD) with Gaussianity assumptions and Elliptically Contoured (EC) distributions, and their kernel-based versions.

References
  • A family of kernel anomaly change detectors. Longbotham, N. and Camps-Valls, G. IEEE Whispers, 2015.
  • Robustness analysis of elliptically contoured multi- and hyperspectral change detection algorithms. M. A. Belenguer, Longbotham, N. and Camps-Valls, G. Submitted, 2016.

Large Margin Filtering SVM

Large Margin Filtering SVM Image

A large margin SVM algorithm that learns convolutional filters, applicable to time series analysis and remote sensing image classification.

References
  • Large margin filtering. Flamary, R., Tuia, D., Labbé, B., Camps-Valls, G., Rakotomamonjy, A. IEEE Transactions on Signal Processing 60(2): 648-659, 2012.
  • Learning spatial filters for multispectral image segmentation. Tuia, D., Camps-Valls, G., Flamary, R., Rakotomamonjy, A. Proceedings of MLSP 2010.

Our Modified libSVM

Modified libSVM Image

Precomputed kernels, e-Huber cost function, accuracy assessment, and other useful features for support vector machine methods.

References

Semi-Supervised Graph-Based Classification

Semi-Supervised Graph-Based Classification Image

A graph-based method for semi-supervised learning, successfully applied to hyperspectral image classification. Incorporates contextual information via composite kernels and uses the Nyström method for scalability.

References
  • Semi-supervised graph-based hyperspectral image classification. Camps-Valls, G., Bandos Marsheva, T.V., Zhou, D. IEEE Transactions on Geoscience and Remote Sensing 45(10): 3044-3054, 2007.

simpleClass: Simple Classification Toolbox

simpleClass Toolbox Image

A set of train-test simple educational functions for data classification including LDA, QDA, SVM, decision trees, random forests, and Gaussian process classifiers.

References

UKC: Unsupervised Kernel Change Detection

UKC Image

Implements an automatic change detection algorithm using kmeans and gaussian kernel kmeans for clustering the difference image in feature spaces.

References
  • Unsupervised change detection by kernel clustering. Volpi, M., Tuia, D., Camps-Valls, G., Kanevski, M. Proceedings of SPIE 7830, 2010.
  • Unsupervised change detection in the feature space using kernels. Volpi, M., Tuia, D., Camps-Valls, G., Kanevski, M. IGARSS 2011.

\ No newline at end of file +

Classification, change and anomaly detection

ALTB: Active Learning MATLAB(tm) Toolbox

Active Learning Toolbox Image

ALTB is a set of tools implementing state-of-the-art active learning algorithms for remote sensing applications.

References
  • Semisupervised classification of remote sensing images with active queries. Munoz-Mari, J., Tuia, D., and Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing 50(10): 3751-3763, 2012.
  • Remote sensing image segmentation by active queries. Tuia, D., Muñoz-Marí, J., Camps-Valls, G. Pattern Recognition 45(6): 2180-2192, 2012.

BagSVM: Bag Support Vector Machine

Bag Support Vector Machine Image

A semi-supervised SVM method for the classification of remote sensing images, learning a kernel directly from the image and regularizing the representation with cluster kernels.

References
  • Semisupervised remote sensing image classification with cluster kernels. Tuia, D., Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters 6(2): 224-228, 2009.
  • Spectral clustering with the probabilistic cluster kernel. Emma Izquierdo-Verdiguier, Robert Jenssen, Luis Gómez-Chova, Gustavo Camps-Valls. Neurocomputing 149(C): 1299-1304, 2015.

Graph Kernels for Spatio-Spectral Classification

Graph Kernels for Spatio-Spectral Classification Image

A graph kernel for spatio-spectral remote sensing image classification using support vector machines (SVM), incorporating higher-order relations in the neighborhood for improved classification accuracy.

References
  • Spatio-spectral remote sensing image classification with graph kernels. Camps-Valls, G., Shervashidze, N., and Borgwardt, K.M. IEEE Geoscience and Remote Sensing Letters 7(4): 741-745, 2010.

HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers

HyperLabelMe Platform Image

The Image and Signal Processing (ISP) group at the Universitat de València has harmonized a big database of labeled multi- and hyperspectral images for testing classification algorithms. We have harmonized 43 image datasets, both multi- and hyperspectral, for objective evaluation of algorithms and submitted papers. Researchers can train their algorithms off-line, and evaluate their accuracy on independent spectra test sets. The system returns accuracy and robustness measures, as well as a ranked list of the best methods.

References
  • J. Munoz-Mari et al., ‘HyperLabelMe: A Web Platform for Benchmarking Remote-Sensing Image Classifiers,’ in IEEE Geoscience and Remote Sensing Magazine, vol. 5, no. 4, pp. 79-85, Dec. 2017. doi: 10.1109/MGRS.2017.2762476.

Kernelized EC-ACD: Elliptically Contoured Anomaly Change Detection

Kernelized EC-ACD Image

A simple Toolbox for Anomaly Change Detection (ACD) with Gaussianity assumptions and Elliptically Contoured (EC) distributions, and their kernel-based versions.

References
  • A family of kernel anomaly change detectors. Longbotham, N. and Camps-Valls, G. IEEE Whispers, 2015.
  • Robustness analysis of elliptically contoured multi- and hyperspectral change detection algorithms. M. A. Belenguer, Longbotham, N. and Camps-Valls, G. Submitted, 2016.

Large Margin Filtering SVM

Large Margin Filtering SVM Image

A large margin SVM algorithm that learns convolutional filters, applicable to time series analysis and remote sensing image classification.

References
  • Large margin filtering. Flamary, R., Tuia, D., Labbé, B., Camps-Valls, G., Rakotomamonjy, A. IEEE Transactions on Signal Processing 60(2): 648-659, 2012.
  • Learning spatial filters for multispectral image segmentation. Tuia, D., Camps-Valls, G., Flamary, R., Rakotomamonjy, A. Proceedings of MLSP 2010.

Our Modified libSVM

Modified libSVM Image

Precomputed kernels, e-Huber cost function, accuracy assessment, and other useful features for support vector machine methods.

References

Semi-Supervised Graph-Based Classification

Semi-Supervised Graph-Based Classification Image

A graph-based method for semi-supervised learning, successfully applied to hyperspectral image classification. Incorporates contextual information via composite kernels and uses the Nyström method for scalability.

References
  • Semi-supervised graph-based hyperspectral image classification. Camps-Valls, G., Bandos Marsheva, T.V., Zhou, D. IEEE Transactions on Geoscience and Remote Sensing 45(10): 3044-3054, 2007.

simpleClass: Simple Classification Toolbox

simpleClass Toolbox Image

A set of train-test simple educational functions for data classification including LDA, QDA, SVM, decision trees, random forests, and Gaussian process classifiers.

References

UKC: Unsupervised Kernel Change Detection

UKC Image

Implements an automatic change detection algorithm using kmeans and Gaussian kernel kmeans for clustering the difference image in feature spaces.

References
  • Unsupervised change detection by kernel clustering. Volpi, M., Tuia, D., Camps-Valls, G., Kanevski, M. Proceedings of SPIE 7830, 2010.
  • Unsupervised change detection in the feature space using kernels. Volpi, M., Tuia, D., Camps-Valls, G., Kanevski, M. IGARSS 2011.

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/index.xml b/code/classification_change_anomaly_detect/index.xml index c0d0db35..5c8a100c 100644 --- a/code/classification_change_anomaly_detect/index.xml +++ b/code/classification_change_anomaly_detect/index.xml @@ -1 +1 @@ -Classification, change and anomaly detection on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/Recent content in Classification, change and anomaly detection on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Classification, change and anomaly detection on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/Recent content in Classification, change and anomaly detection on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/kernelized_ec_acd/index.html b/code/classification_change_anomaly_detect/kernelized_ec_acd/index.html index cfb3445e..6001e6b5 100644 --- a/code/classification_change_anomaly_detect/kernelized_ec_acd/index.html +++ b/code/classification_change_anomaly_detect/kernelized_ec_acd/index.html @@ -2,4 +2,4 @@

Kernelized EC-ACD: Elliptically Contoured Anomaly Change Detection

\ No newline at end of file +

Kernelized EC-ACD: Elliptically Contoured Anomaly Change Detection

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/kernelized_ec_acd/index.xml b/code/classification_change_anomaly_detect/kernelized_ec_acd/index.xml index 90243a91..31f546eb 100644 --- a/code/classification_change_anomaly_detect/kernelized_ec_acd/index.xml +++ b/code/classification_change_anomaly_detect/kernelized_ec_acd/index.xml @@ -1 +1 @@ -Kernelized EC-ACD: Elliptically Contoured Anomaly Change Detection on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/kernelized_ec_acd/Recent content in Kernelized EC-ACD: Elliptically Contoured Anomaly Change Detection on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Kernelized EC-ACD: Elliptically Contoured Anomaly Change Detection on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/kernelized_ec_acd/Recent content in Kernelized EC-ACD: Elliptically Contoured Anomaly Change Detection on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/libsvm/index.html b/code/classification_change_anomaly_detect/libsvm/index.html index f83c7316..bbba8ce8 100644 --- a/code/classification_change_anomaly_detect/libsvm/index.html +++ b/code/classification_change_anomaly_detect/libsvm/index.html @@ -2,4 +2,4 @@

Our Modified libSVM

\ No newline at end of file +

Our Modified libSVM

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/libsvm/index.xml b/code/classification_change_anomaly_detect/libsvm/index.xml index 4d2260e5..8a5b2240 100644 --- a/code/classification_change_anomaly_detect/libsvm/index.xml +++ b/code/classification_change_anomaly_detect/libsvm/index.xml @@ -1 +1 @@ -Our Modified libSVM on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/libsvm/Recent content in Our Modified libSVM on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Our Modified libSVM on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/libsvm/Recent content in Our Modified libSVM on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/lmfsvm/index.html b/code/classification_change_anomaly_detect/lmfsvm/index.html index bb2c92d6..543a07e2 100644 --- a/code/classification_change_anomaly_detect/lmfsvm/index.html +++ b/code/classification_change_anomaly_detect/lmfsvm/index.html @@ -2,4 +2,4 @@

Large Margin Filtering SVM

\ No newline at end of file +

Large Margin Filtering SVM

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/lmfsvm/index.xml b/code/classification_change_anomaly_detect/lmfsvm/index.xml index b40be918..ae477b2f 100644 --- a/code/classification_change_anomaly_detect/lmfsvm/index.xml +++ b/code/classification_change_anomaly_detect/lmfsvm/index.xml @@ -1 +1 @@ -Large Margin Filtering SVM on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/lmfsvm/Recent content in Large Margin Filtering SVM on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Large Margin Filtering SVM on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/lmfsvm/Recent content in Large Margin Filtering SVM on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/semi_supervised/index.html b/code/classification_change_anomaly_detect/semi_supervised/index.html index aaa14449..bcabe820 100644 --- a/code/classification_change_anomaly_detect/semi_supervised/index.html +++ b/code/classification_change_anomaly_detect/semi_supervised/index.html @@ -2,4 +2,4 @@

Semi-Supervised Graph-Based Classification

\ No newline at end of file +

Semi-Supervised Graph-Based Classification

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/semi_supervised/index.xml b/code/classification_change_anomaly_detect/semi_supervised/index.xml index 365c5bc7..afe2befc 100644 --- a/code/classification_change_anomaly_detect/semi_supervised/index.xml +++ b/code/classification_change_anomaly_detect/semi_supervised/index.xml @@ -1 +1 @@ -Semi-Supervised Graph-Based Classification on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/semi_supervised/Recent content in Semi-Supervised Graph-Based Classification on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Semi-Supervised Graph-Based Classification on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/semi_supervised/Recent content in Semi-Supervised Graph-Based Classification on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/simpleclass/index.html b/code/classification_change_anomaly_detect/simpleclass/index.html index 3122b7e2..931cec8a 100644 --- a/code/classification_change_anomaly_detect/simpleclass/index.html +++ b/code/classification_change_anomaly_detect/simpleclass/index.html @@ -2,4 +2,4 @@

simpleClass: Simple Classification Toolbox

\ No newline at end of file +

simpleClass: Simple Classification Toolbox

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/simpleclass/index.xml b/code/classification_change_anomaly_detect/simpleclass/index.xml index cfb9c4ad..5f44fe88 100644 --- a/code/classification_change_anomaly_detect/simpleclass/index.xml +++ b/code/classification_change_anomaly_detect/simpleclass/index.xml @@ -1 +1 @@ -simpleClass: Simple Classification Toolbox on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/simpleclass/Recent content in simpleClass: Simple Classification Toolbox on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +simpleClass: Simple Classification Toolbox on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/simpleclass/Recent content in simpleClass: Simple Classification Toolbox on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/classification_change_anomaly_detect/ukc/index.html b/code/classification_change_anomaly_detect/ukc/index.html index e7f01d9e..5a22c4de 100644 --- a/code/classification_change_anomaly_detect/ukc/index.html +++ b/code/classification_change_anomaly_detect/ukc/index.html @@ -2,4 +2,4 @@

UKC: Unsupervised Kernel Change Detection

\ No newline at end of file +

UKC: Unsupervised Kernel Change Detection

\ No newline at end of file diff --git a/code/classification_change_anomaly_detect/ukc/index.xml b/code/classification_change_anomaly_detect/ukc/index.xml index a2fd756e..8f0727c8 100644 --- a/code/classification_change_anomaly_detect/ukc/index.xml +++ b/code/classification_change_anomaly_detect/ukc/index.xml @@ -1 +1 @@ -UKC: Unsupervised Kernel Change Detection on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/classification_change_anomaly_detect/ukc/Recent content in UKC: Unsupervised Kernel Change Detection on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +UKC: Unsupervised Kernel Change Detection on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/classification_change_anomaly_detect/ukc/Recent content in UKC: Unsupervised Kernel Change Detection on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/feature_extraction/ddr/content/index.html b/code/feature_extraction/ddr/content/index.html index e80162f5..87cf9567 100644 --- a/code/feature_extraction/ddr/content/index.html +++ b/code/feature_extraction/ddr/content/index.html @@ -2,5 +2,5 @@

Dimensionality Reduction via Regression (DRR)

This paper introduces a new unsupervised method for dimensionality reduction via regression (DRR). The algorithm belongs to the family of invertible transforms that generalize Principal Component Analysis (PCA) by using curvilinear instead of linear features. DRR identifies the nonlinear features through multivariate regression to ensure the reduction in redundancy between the PCA coefficients, the reduction of the variance of the scores, and the reduction in the reconstruction error. More importantly, unlike other nonlinear dimensionality reduction methods, the invertibility, volume-preservation, and straightforward out-of-sample extension, makes DRR interpretable and easy to apply. Properties of DRR enable learning a broader class of data manifolds than recently proposed Non-linear Principal Components Analysis (NLPCA) and Principal Polynomial Analysis (PPA). The figure below illustrates the behavior of different algorithms in this family: from the rigid (linear) PCA to the flexible Sequential Principal Curves Analysis (SPCA). In the paper, we illustrate the performance of the representation in reducing the dimensionality of hyperspectral images. In particular, we tackle two common problems: processing very high dimensional spectral information such as in image sounding data, and dealing with spatial-spectral image patches of multispectral images. Both settings pose collinearity and ill-determination problems. Evaluation of the expressive power of the features is assessed in terms of truncation error, estimating atmospheric variables, and surface land cover classification error. Results show that DRR outperforms linear PCA and recently proposed invertible extensions based on neural networks (NLPCA) and univariate regressions (PPA).

EPLS: Unsupervised Sparse Convolutional Neural Networks for Feature Extraction

\ No newline at end of file diff --git a/code/feature_extraction/epls/index.xml b/code/feature_extraction/epls/index.xml index 76c133b6..d5c97cb6 100644 --- a/code/feature_extraction/epls/index.xml +++ b/code/feature_extraction/epls/index.xml @@ -1 +1 @@ -EPLS: Unsupervised Sparse Convolutional Neural Networks for Feature Extraction on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/feature_extraction/epls/Recent content in EPLS: Unsupervised Sparse Convolutional Neural Networks for Feature Extraction on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +EPLS: Unsupervised Sparse Convolutional Neural Networks for Feature Extraction on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/feature_extraction/epls/Recent content in EPLS: Unsupervised Sparse Convolutional Neural Networks for Feature Extraction on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/feature_extraction/hocca/content/index.html b/code/feature_extraction/hocca/content/index.html index 81bbab19..71b55fb7 100644 --- a/code/feature_extraction/hocca/content/index.html +++ b/code/feature_extraction/hocca/content/index.html @@ -2,4 +2,4 @@

Spatio-Chromatic Adaptation via Higher-Order Canonical Correlation Analysis of Natural Images

Independent component and canonical correlation analysis are twogeneral-purpose statistical methods with wide applicability. Inneuroscience, independent component analysis of chromatic naturalimages explains the spatio-chromatic structure of primary corticalreceptive fields in terms of properties of the visual environment.Canonical correlation analysis explains similarly chromatic adaptationto different illuminations. But, as we show in this paper, neither ofthe two methods generalizes well to explain both spatio-chromaticprocessing and adaptation at the same time. We propose a statisticalmethod which combines the desirable properties of independent componentand canonical correlation analysis: It finds independent components ineach data set which, across the two data sets, are related to eachother via linear or higher-order correlations. The new method is aswidely applicable as canonical correlation analysis, and also to morethan two data sets. We call it higher-order canonical correlationanalysis. When applied to chromatic natural images, we found that itprovides a single (unified) statistical framework which accounts forboth spatio-chromatic processing and adaptation. Filters withspatio-chromatic tuning properties as in the primary visual cortexemerged and corresponding-colors psychophysics was reproducedreasonably well. We used the new method to make a theory-driventestable prediction on how the neural response to colored patternsshould change when the illumination changes. We predict shifts in theresponses which are comparable to the shifts reported for chromaticcontrast habituation.

References

Download

\ No newline at end of file +

Spatio-Chromatic Adaptation via Higher-Order Canonical Correlation Analysis of Natural Images

Independent component and canonical correlation analysis are twogeneral-purpose statistical methods with wide applicability. Inneuroscience, independent component analysis of chromatic naturalimages explains the spatio-chromatic structure of primary corticalreceptive fields in terms of properties of the visual environment.Canonical correlation analysis explains similarly chromatic adaptationto different illuminations. But, as we show in this paper, neither ofthe two methods generalizes well to explain both spatio-chromaticprocessing and adaptation at the same time. We propose a statisticalmethod which combines the desirable properties of independent componentand canonical correlation analysis: It finds independent components ineach data set which, across the two data sets, are related to eachother via linear or higher-order correlations. The new method is aswidely applicable as canonical correlation analysis, and also to morethan two data sets. We call it higher-order canonical correlationanalysis. When applied to chromatic natural images, we found that itprovides a single (unified) statistical framework which accounts forboth spatio-chromatic processing and adaptation. Filters withspatio-chromatic tuning properties as in the primary visual cortexemerged and corresponding-colors psychophysics was reproducedreasonably well. We used the new method to make a theory-driventestable prediction on how the neural response to colored patternsshould change when the illumination changes. We predict shifts in theresponses which are comparable to the shifts reported for chromaticcontrast habituation.

References

Download

\ No newline at end of file diff --git a/code/feature_extraction/hocca/index.html b/code/feature_extraction/hocca/index.html index 667285ca..cd07d258 100644 --- a/code/feature_extraction/hocca/index.html +++ b/code/feature_extraction/hocca/index.html @@ -2,4 +2,4 @@

HOCCA: Higher Order Canonical Correlation Analysis

\ No newline at end of file +

HOCCA: Higher Order Canonical Correlation Analysis

\ No newline at end of file diff --git a/code/feature_extraction/hocca/index.xml b/code/feature_extraction/hocca/index.xml index f35671b8..7d41194f 100644 --- a/code/feature_extraction/hocca/index.xml +++ b/code/feature_extraction/hocca/index.xml @@ -1 +1 @@ -HOCCA: Higher Order Canonical Correlation Analysis on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/feature_extraction/hocca/Recent content in HOCCA: Higher Order Canonical Correlation Analysis on ISP - Image and Signal Processing groupHugoen-usSpatio-Chromatic Adaptation via Higher-Order Canonical Correlation Analysis of Natural Imageshttps://ipl-uv.github.io/code/feature_extraction/hocca/content/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/code/feature_extraction/hocca/content/ \ No newline at end of file +HOCCA: Higher Order Canonical Correlation Analysis on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/feature_extraction/hocca/Recent content in HOCCA: Higher Order Canonical Correlation Analysis on ISP - Image and Signal Processing groupHugoen-usSpatio-Chromatic Adaptation via Higher-Order Canonical Correlation Analysis of Natural Imageshttps://isp.uv.es/github/code/feature_extraction/hocca/content/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/code/feature_extraction/hocca/content/ \ No newline at end of file diff --git a/code/feature_extraction/index.html b/code/feature_extraction/index.html index eebb1ecc..dfed4797 100644 --- a/code/feature_extraction/index.html +++ b/code/feature_extraction/index.html @@ -2,4 +2,4 @@

Feature extraction, dimensionality reduction and manifold learning

DRR: Dimensionality Reduction via Regression

DRR Image

Dimensionality Reduction via Regression (DRR) is a manifold learning technique aimed at removing residual statistical dependence between PCA components due to dataset curvature. DRR predicts PCA coefficients from neighboring coefficients using multivariate regression, generalizing PPA. It advances dimensionality reduction methods by using curves instead of straight lines.

References
  • Dimensionality reduction via regression in hyperspectral imagery. Laparra, V., Malo, J., and Camps-Valls, G. IEEE Journal on Selected Topics in Signal Processing, 9(6):1026-1036, 2015.

EPLS: Unsupervised Sparse Convolutional Neural Networks for Feature Extraction

EPLS Image

EPLS (Enhancing Population and Lifetime Sparsity) is an unsupervised feature learning algorithm designed for sparse representations in convolutional neural networks. It is meta-parameter free, simple, and fast.

References
  • Unrolling loopy top-down semantic feedback in convolutional deep networks. Gatta, C., Romero, A., van de Weijer, J. Deep-vision workshop CVPR, 2014.
  • Unsupervised Deep Feature Extraction Of Hyperspectral Images. Romero, A., Gatta, C., Camps-Valls, G. IEEE Workshop on Hyperspectral Image and Signal Processing, Whispers, 2014.
  • Unsupervised Deep Feature Extraction for Remote Sensing Image Classification. Romero, A., Gatta, C., Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing, 2015.

HOCCA: Higher Order Canonical Correlation Analysis

HOCCA Image

HOCCA is a linear manifold learning technique that applies to datasets from the same source. It finds independent components in each dataset that are related across datasets, thus combining the goals of ICA and CCA.

References
  • Spatio-Chromatic Adaptation via Higher-Order Canonical Correlation Analysis of Natural Images. Gutmann, M.U., Laparra, V., Hyvärinen, A., Malo, J. PLoS ONE, 9(2):e86481, 2014.

KEMA: Kernel Manifold Alignment

KEMA Image

KEMA extends SSMA by using kernel methods for better semantic alignments of multisource data.

References
  • Kernel Manifold Alignment for Domain Adaptation. Tuia, D., Camps-Valls, G. PLoS ONE, 2016.

KSNR: Kernel Signal to Noise Ratio

KSNR Image

KSNR is a feature extraction method that maximizes signal variance while minimizing noise variance in a reproducing kernel Hilbert space (RKHS). It provides noise-free features for dimensionality reduction, outperforming kPCA in correlated noise scenarios.

References
  • Learning with the kernel signal to noise ratio. Gomez-Chova, L., Camps-Valls, G. IEEE International Workshop on Machine Learning for Signal Processing, MLSP, 2012.

OKECA: Optimized Kernel Entropy Component Analysis

OKECA Image

OKECA is a kernel feature extraction method based on entropy estimation in Hilbert spaces. It provides sparse and compact results, useful for data visualization and dimensionality reduction.

References
  • Optimized Kernel Entropy Components. Izquierdo-Verdiguier, E., Laparra, V., Jenssen, R., Gómez-Chova, L., Camps-Valls, G. IEEE Transactions on Neural Networks and Learning Systems, 2016.

PPA: Principal Polynomial Analysis

PPA Image

Principal Polynomial Analysis (PPA) is a manifold learning technique that generalizes PCA by using principal polynomials to capture nonlinear data patterns. It improves PCA’s energy compaction ability, reducing dimensionality reduction errors. PPA defines a manifold-dependent metric that generalizes Mahalanobis distance for curved manifolds.

References
  • Principal polynomial analysis. Laparra, V., Jiménez, S., Tuia, D., Camps-Valls, G., Malo, J. International Journal of Neural Systems, 24(7), 2014.

RBIG: Rotation-Based Iterative Gaussianization

RBIG Image

RBIG is an invertible multivariate Gaussianization transform that uses univariate histogram Gaussianization and multivariate rotation. This method is useful for multivariate PDF estimation and associated applications.

References
  • Iterative gaussianization: From ICA to random rotations. Laparra, V., Camps-Valls, G., Malo, J. IEEE Transactions on Neural Networks, 22(4):537-549, 2011.
  • PCA Gaussianization for one-class remote sensing image classification. Laparra, V., Muñoz-Marí, J., Camps-Valls, G., Malo, J. Proceedings of SPIE, 7477, 2009.
  • PCA Gaussianization for image processing. Laparra, V., Camps-Valls, G., Malo, J. Proceedings - International Conference on Image Processing, ICIP, 2009.

ROCK-PCA: Rotated Complex Kernel PCA for Nonlinear Spatio-Temporal Data Analysis

ROCK-PCA Image

The rotated complex kernel PCA (ROCK-PCA) works in reproducing kernel Hilbert spaces to account for nonlinear processes, operates in the complex domain to handle both spatial and temporal features and time-lagged correlations. It adds an extra rotation for improved flexibility and physical consistency, providing an explicitly resolved spatio-temporal decomposition of Earth and climate data cubes.

References
  • Nonlinear PCA for Spatio-Temporal Analysis of Earth Observation Data. Bueso, D., Piles, M., and Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing, 58(8), 2020.

SIMFEAT: A Simple MATLAB(tm) Toolbox of Linear and Kernel Feature Extraction

SIMFEAT Toolbox Image

SIMFEAT is a toolbox that includes linear and kernel feature extraction methods. Linear methods: PCA, MNF, CCA, PLS, OPLS. Kernel methods: KPCA, KMNF, KCCA, KPLS, KOPLS, KECA.

References
  • Kernel multivariate analysis framework for supervised subspace learning: A tutorial on linear and kernel multivariate methods. Arenas-Garcia, J., Petersen, K.B., Camps-Valls, G., Hansen, L.K. IEEE Signal Processing Magazine, 30(4):16-29, 2013.

SPCA: Sequential Principal Curves Analysis

SPCA Image

SPCA is an invertible manifold learning technique that generalizes PCA by using nonparametric principal curves instead of straight lines. It includes multivariate histogram equalization to fulfill either NonLinear ICA or optimal Vector Quantization.

References
  • Nonlinearities and adaptation of color vision from sequential principal curves analysis. Laparra, V., Jiménez, S., Camps-Valls, G., Malo, J. Neural Computation, 24(10):2751-2788, 2012.

SSKPLS: Semisupervised Kernel Partial Least Squares

SSKPLS Image

SSKPLS utilizes probabilistic cluster kernels for nonlinear feature extraction. It builds kernel functions from data, outperforming standard kernel functions and information theoretic kernels like Fisher and mutual information kernels.

References
  • Spectral clustering with the probabilistic cluster kernel. Izquierdo-Verdiguier, E., Jenssen, R., Gómez-Chova, L., Camps-Valls, G. Neurocomputing, 149(C):1299-1304, 2015.
  • Semisupervised kernel feature extraction for remote sensing image analysis. Izquierdo-Verdiguier, E., Gomez-Chova, L., Bruzzone, L., Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing, 52(9):5567-5578, 2014.

SSMA: SemiSupervised Manifold Alignment

SSMA Image

The SSMA Toolbox is a MATLAB tool for semisupervised manifold alignment of data without corresponding pairs, requiring only a small set of labeled samples in each domain.

References
  • Semisupervised manifold alignment of multimodal remote sensing images. Tuia, D., Volpi, M., Trolliet, M., Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing, 52(12):7708-7720, 2014.

\ No newline at end of file +

Feature extraction, dimensionality reduction and manifold learning

DRR: Dimensionality Reduction via Regression

DRR Image

Dimensionality Reduction via Regression (DRR) is a manifold learning technique aimed at removing residual statistical dependence between PCA components due to dataset curvature. DRR predicts PCA coefficients from neighboring coefficients using multivariate regression, generalizing PPA. It advances dimensionality reduction methods by using curves instead of straight lines.

References
  • Dimensionality reduction via regression in hyperspectral imagery. Laparra, V., Malo, J., and Camps-Valls, G. IEEE Journal on Selected Topics in Signal Processing, 9(6):1026-1036, 2015.

EPLS: Unsupervised Sparse Convolutional Neural Networks for Feature Extraction

EPLS Image

EPLS (Enhancing Population and Lifetime Sparsity) is an unsupervised feature learning algorithm designed for sparse representations in convolutional neural networks. It is meta-parameter free, simple, and fast.

References
  • Unrolling loopy top-down semantic feedback in convolutional deep networks. Gatta, C., Romero, A., van de Weijer, J. Deep-vision workshop CVPR, 2014.
  • Unsupervised Deep Feature Extraction Of Hyperspectral Images. Romero, A., Gatta, C., Camps-Valls, G. IEEE Workshop on Hyperspectral Image and Signal Processing, Whispers, 2014.
  • Unsupervised Deep Feature Extraction for Remote Sensing Image Classification. Romero, A., Gatta, C., Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing, 2015.

HOCCA: Higher Order Canonical Correlation Analysis

HOCCA Image

HOCCA is a linear manifold learning technique that applies to datasets from the same source. It finds independent components in each dataset that are related across datasets, thus combining the goals of ICA and CCA.

References
  • Spatio-Chromatic Adaptation via Higher-Order Canonical Correlation Analysis of Natural Images. Gutmann, M.U., Laparra, V., Hyvärinen, A., Malo, J. PLoS ONE, 9(2):e86481, 2014.

KEMA: Kernel Manifold Alignment

KEMA Image

KEMA extends SSMA by using kernel methods for better semantic alignments of multisource data.

References
  • Kernel Manifold Alignment for Domain Adaptation. Tuia, D., Camps-Valls, G. PLoS ONE, 2016.

KSNR: Kernel Signal to Noise Ratio

KSNR Image

KSNR is a feature extraction method that maximizes signal variance while minimizing noise variance in a reproducing kernel Hilbert space (RKHS). It provides noise-free features for dimensionality reduction, outperforming kPCA in correlated noise scenarios.

References
  • Learning with the kernel signal to noise ratio. Gomez-Chova, L., Camps-Valls, G. IEEE International Workshop on Machine Learning for Signal Processing, MLSP, 2012.

OKECA: Optimized Kernel Entropy Component Analysis

OKECA Image

OKECA is a kernel feature extraction method based on entropy estimation in Hilbert spaces. It provides sparse and compact results, useful for data visualization and dimensionality reduction.

References
  • Optimized Kernel Entropy Components. Izquierdo-Verdiguier, E., Laparra, V., Jenssen, R., Gómez-Chova, L., Camps-Valls, G. IEEE Transactions on Neural Networks and Learning Systems, 2016.

PPA: Principal Polynomial Analysis

PPA Image

Principal Polynomial Analysis (PPA) is a manifold learning technique that generalizes PCA by using principal polynomials to capture nonlinear data patterns. It improves PCA’s energy compaction ability, reducing dimensionality reduction errors. PPA defines a manifold-dependent metric that generalizes Mahalanobis distance for curved manifolds.

References
  • Principal polynomial analysis. Laparra, V., Jiménez, S., Tuia, D., Camps-Valls, G., Malo, J. International Journal of Neural Systems, 24(7), 2014.

RBIG: Rotation-Based Iterative Gaussianization

RBIG Image

RBIG is an invertible multivariate Gaussianization transform that uses univariate histogram Gaussianization and multivariate rotation. This method is useful for multivariate PDF estimation and associated applications.

References
  • Iterative gaussianization: From ICA to random rotations. Laparra, V., Camps-Valls, G., Malo, J. IEEE Transactions on Neural Networks, 22(4):537-549, 2011.
  • PCA Gaussianization for one-class remote sensing image classification. Laparra, V., Muñoz-Marí, J., Camps-Valls, G., Malo, J. Proceedings of SPIE, 7477, 2009.
  • PCA Gaussianization for image processing. Laparra, V., Camps-Valls, G., Malo, J. Proceedings - International Conference on Image Processing, ICIP, 2009.

ROCK-PCA: Rotated Complex Kernel PCA for Nonlinear Spatio-Temporal Data Analysis

ROCK-PCA Image

The rotated complex kernel PCA (ROCK-PCA) works in reproducing kernel Hilbert spaces to account for nonlinear processes, operates in the complex domain to handle both spatial and temporal features and time-lagged correlations. It adds an extra rotation for improved flexibility and physical consistency, providing an explicitly resolved spatio-temporal decomposition of Earth and climate data cubes.

References
  • Nonlinear PCA for Spatio-Temporal Analysis of Earth Observation Data. Bueso, D., Piles, M., and Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing, 58(8), 2020.

SIMFEAT: A Simple MATLAB(tm) Toolbox of Linear and Kernel Feature Extraction

SIMFEAT Toolbox Image

SIMFEAT is a toolbox that includes linear and kernel feature extraction methods. Linear methods: PCA, MNF, CCA, PLS, OPLS. Kernel methods: KPCA, KMNF, KCCA, KPLS, KOPLS, KECA.

References
  • Kernel multivariate analysis framework for supervised subspace learning: A tutorial on linear and kernel multivariate methods. Arenas-Garcia, J., Petersen, K.B., Camps-Valls, G., Hansen, L.K. IEEE Signal Processing Magazine, 30(4):16-29, 2013.

SPCA: Sequential Principal Curves Analysis

SPCA Image

SPCA is an invertible manifold learning technique that generalizes PCA by using nonparametric principal curves instead of straight lines. It includes multivariate histogram equalization to fulfill either NonLinear ICA or optimal Vector Quantization.

References
  • Nonlinearities and adaptation of color vision from sequential principal curves analysis. Laparra, V., Jiménez, S., Camps-Valls, G., Malo, J. Neural Computation, 24(10):2751-2788, 2012.

SSKPLS: Semisupervised Kernel Partial Least Squares

SSKPLS Image

SSKPLS utilizes probabilistic cluster kernels for nonlinear feature extraction. It builds kernel functions from data, outperforming standard kernel functions and information theoretic kernels like Fisher and mutual information kernels.

References
  • Spectral clustering with the probabilistic cluster kernel. Izquierdo-Verdiguier, E., Jenssen, R., Gómez-Chova, L., Camps-Valls, G. Neurocomputing, 149(C):1299-1304, 2015.
  • Semisupervised kernel feature extraction for remote sensing image analysis. Izquierdo-Verdiguier, E., Gomez-Chova, L., Bruzzone, L., Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing, 52(9):5567-5578, 2014.

SSMA: SemiSupervised Manifold Alignment

SSMA Image

The SSMA Toolbox is a MATLAB tool for semisupervised manifold alignment of data without corresponding pairs, requiring only a small set of labeled samples in each domain.

References
  • Semisupervised manifold alignment of multimodal remote sensing images. Tuia, D., Volpi, M., Trolliet, M., Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing, 52(12):7708-7720, 2014.

\ No newline at end of file diff --git a/code/feature_extraction/index.xml b/code/feature_extraction/index.xml index 7589ee3c..d662aae5 100644 --- a/code/feature_extraction/index.xml +++ b/code/feature_extraction/index.xml @@ -1 +1 @@ -Feature extraction, dimensionality reduction and manifold learning on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/feature_extraction/Recent content in Feature extraction, dimensionality reduction and manifold learning on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Feature extraction, dimensionality reduction and manifold learning on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/feature_extraction/Recent content in Feature extraction, dimensionality reduction and manifold learning on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/feature_extraction/kema/index.html b/code/feature_extraction/kema/index.html index c4998763..b11f9e28 100644 --- a/code/feature_extraction/kema/index.html +++ b/code/feature_extraction/kema/index.html @@ -2,4 +2,4 @@

KEMA: Kernel Manifold Alignment

\ No newline at end of file +

KEMA: Kernel Manifold Alignment

\ No newline at end of file diff --git a/code/feature_extraction/kema/index.xml b/code/feature_extraction/kema/index.xml index 07530b23..d3347f81 100644 --- a/code/feature_extraction/kema/index.xml +++ b/code/feature_extraction/kema/index.xml @@ -1 +1 @@ -KEMA: Kernel Manifold Alignment on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/feature_extraction/kema/Recent content in KEMA: Kernel Manifold Alignment on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +KEMA: Kernel Manifold Alignment on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/feature_extraction/kema/Recent content in KEMA: Kernel Manifold Alignment on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/feature_extraction/ksnr/index.html b/code/feature_extraction/ksnr/index.html index 32a216d7..85dcbf6b 100644 --- a/code/feature_extraction/ksnr/index.html +++ b/code/feature_extraction/ksnr/index.html @@ -2,4 +2,4 @@

KSNR: Kernel Signal to Noise Ratio

\ No newline at end of file +

KSNR: Kernel Signal to Noise Ratio

\ No newline at end of file diff --git a/code/feature_extraction/ksnr/index.xml b/code/feature_extraction/ksnr/index.xml index efa3129b..ed0eabec 100644 --- a/code/feature_extraction/ksnr/index.xml +++ b/code/feature_extraction/ksnr/index.xml @@ -1 +1 @@ -KSNR: Kernel Signal to Noise Ratio on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/feature_extraction/ksnr/Recent content in KSNR: Kernel Signal to Noise Ratio on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +KSNR: Kernel Signal to Noise Ratio on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/feature_extraction/ksnr/Recent content in KSNR: Kernel Signal to Noise Ratio on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/feature_extraction/okeca/index.html b/code/feature_extraction/okeca/index.html index a004d61e..33939b89 100644 --- a/code/feature_extraction/okeca/index.html +++ b/code/feature_extraction/okeca/index.html @@ -2,4 +2,4 @@

OKECA: Optimized Kernel Entropy Component Analysis

\ No newline at end of file +

OKECA: Optimized Kernel Entropy Component Analysis

\ No newline at end of file diff --git a/code/feature_extraction/okeca/index.xml b/code/feature_extraction/okeca/index.xml index 0db81d86..ea5ac4c0 100644 --- a/code/feature_extraction/okeca/index.xml +++ b/code/feature_extraction/okeca/index.xml @@ -1 +1 @@ -OKECA: Optimized Kernel Entropy Component Analysis on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/feature_extraction/okeca/Recent content in OKECA: Optimized Kernel Entropy Component Analysis on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +OKECA: Optimized Kernel Entropy Component Analysis on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/feature_extraction/okeca/Recent content in OKECA: Optimized Kernel Entropy Component Analysis on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/feature_extraction/ppa/content/index.html b/code/feature_extraction/ppa/content/index.html index f9ee2e4d..3758bc83 100644 --- a/code/feature_extraction/ppa/content/index.html +++ b/code/feature_extraction/ppa/content/index.html @@ -2,5 +2,5 @@

Principal Polynomial Analysis (PPA)

This paper (and toolbox) presents a new framework for manifold learning based on the use of a sequence of principal polynomials that capture the eventually nonlinear nature of the data. The proposed Principal Polynomial Analysis (PPA) is shown to generalize PCA by admitting curves instead of straight lines. As opposed to previous approaches following the same rationale, PPA reduces to performing canonical, univariate regressions which make it computationally feasible and easy to interpret analytically. We show that the PPA transform is a volume-preserving map, which guarantees the existence of the inverse since the determinant of the Jacobian is bounded. We propose a closed-form solution for the inverse map. Invertibility is an important advantage over other nonlinear dimensionality reduction methods because it permits to understand the identified features in the input domain where data have physical meaning. Moreover, invertibility allows to evaluate the dimensionality reduction performance in sensible units. Preserving the volume also allows to compute the reduction in multi-information achieved by the transform using only marginal operations. Additionally, PPA leads to a clear geometrical interpretation of the manifold: the computation of Frenet-Serret frames along the identified curves allow us to obtain generalized curvature and torsion of the manifold. Moreover, the analytical expression of the Jacobian simplifies the computation of the metric induced by the data. Performance in dimensionality reduction and redundancy reduction, as well as the theoretical properties of PPA, are experimentally tested in datasets from the UCI machine learning repository.

SIMFEAT: A Simple MATLAB(tm) Toolbox of Linear and Kernel Feature Extraction

\ No newline at end of file diff --git a/code/feature_extraction/simfeat/index.xml b/code/feature_extraction/simfeat/index.xml index ac2a151f..7d91c838 100644 --- a/code/feature_extraction/simfeat/index.xml +++ b/code/feature_extraction/simfeat/index.xml @@ -1 +1 @@ -SIMFEAT: A Simple MATLAB(tm) Toolbox of Linear and Kernel Feature Extraction on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/feature_extraction/simfeat/Recent content in SIMFEAT: A Simple MATLAB(tm) Toolbox of Linear and Kernel Feature Extraction on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +SIMFEAT: A Simple MATLAB(tm) Toolbox of Linear and Kernel Feature Extraction on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/feature_extraction/simfeat/Recent content in SIMFEAT: A Simple MATLAB(tm) Toolbox of Linear and Kernel Feature Extraction on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/feature_extraction/spca/content/index.html b/code/feature_extraction/spca/content/index.html index 5328fc74..e9cd2ba9 100644 --- a/code/feature_extraction/spca/content/index.html +++ b/code/feature_extraction/spca/content/index.html @@ -2,7 +2,7 @@

Sequential Principal Curves Analysis Toolbox (SPCA)

SPCA is a manifold learning technique that identifies the curvilinear coordinates of a data set. It defines an invertible transform that can be tuned for NonLinear ICA (infomax) or optimal Vector Quantization (error minimization), and can be used in Dimensionality Reduction, Domain Adaptation, and Classification problems. The explicit form of the identified features (and associated nonlinear ‘filters’) makes it useful to model sensors in theoretical neuroscience. +

Sequential Principal Curves Analysis Toolbox (SPCA)

SPCA is a manifold learning technique that identifies the curvilinear coordinates of a data set. It defines an invertible transform that can be tuned for NonLinear ICA (infomax) or optimal Vector Quantization (error minimization), and can be used in Dimensionality Reduction, Domain Adaptation, and Classification problems. The explicit form of the identified features (and associated nonlinear ‘filters’) makes it useful to model sensors in theoretical neuroscience. Illustrative Results I: Learning Nonlinear Features Identification of curved features and the effect of the metric in SPCA in a curved 2D manifold. Note the different marginal PDFs in the direction perpendicular to the principal curve: Laplacian and Uniform PDFs of increasing variance. Infomax and Error Minimization through SPCA. 500 randomly selected samples of the sets were transformed using SPCA with different metrics. Results are analyzed in terms of independence (Mutual Information) and reconstruction error (RMSE). Illustrative Results II: Image Coding According to Different Optimization Criteria.

SSMA: SemiSupervised Manifold Alignment

\ No newline at end of file diff --git a/code/feature_extraction/ssma/index.xml b/code/feature_extraction/ssma/index.xml index 0cd280f2..f62efc55 100644 --- a/code/feature_extraction/ssma/index.xml +++ b/code/feature_extraction/ssma/index.xml @@ -1 +1 @@ -SSMA: SemiSupervised Manifold Alignment on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/feature_extraction/ssma/Recent content in SSMA: SemiSupervised Manifold Alignment on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +SSMA: SemiSupervised Manifold Alignment on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/feature_extraction/ssma/Recent content in SSMA: SemiSupervised Manifold Alignment on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/image_video_processing/basicvideotools/content/index.html b/code/image_video_processing/basicvideotools/content/index.html index d66ef1b9..adb24a5d 100644 --- a/code/image_video_processing/basicvideotools/content/index.html +++ b/code/image_video_processing/basicvideotools/content/index.html @@ -2,4 +2,4 @@

Basic Video Tools: A Matlab Toolbox for Video Data and Spatio-Temporal Vision Models (J. Malo, J. Gutirrez and V. Laparra (c) Universitat de Valncia 1996 - 2014)

What is in BasicVideoTools?

BasicVideoTools is a Matlab/Octave Toolbox intendend to deal with video data and spatio-temporal vision models. In particular, it includes convenient *.m files to:

  • Read standard (VQEG and LIVE) video data
  • Rearrange video data (as for instance to perform statistical analysis)
  • Generate controlled sequences (controlled contrast, texture, and 2d and 3d speed)
  • Compute 3D Fourier transforms
  • Play with motion perception models (spatial texture and motion-sensitive cells of LGN, V1 and MT, and spatio-temporal CSF)
  • Visualize movies (achromatic only)

What is not in BasicVideoTools?

BasicVideoTools does not include:

  • Optical flow or motion estimation/compensation algorithms
  • Video Coding algorithms
  • Video Quality Mesures

If you are looking for the above, please consider downloading other Toolboxes:

Download BasicVideoTools!

  • The code (version 1.0. Use this version only for compatibility with the code in the experiments of the motion-aftereffect paper).

  • The code (version 3.0 -Not only improved sampling functions and additional motion sensitive cells, but also more things)

  • Optional data (not necessary to run the code): If you use these data please cite the VQEG and LIVE databases (for video), and the CVC Barcelona Database (for images)

    • Image data (1.8 GB). Luminance images from the CVC Barcelona Calibrated Image Database.

    • Video data (2.6 GB): Raw videos from the VQEG and LIVE video databases.

Installation and Requirements

  • Download the BasicVideoTools file(s)
  • Decompress at your machine in the folder BasicVideoTools (no location restrictions for this folder)
  • Update the matlab/octave path including all subfolders
  • Tested on Matlab 2006b and posterior Matlab versions
  • Video and image data are only required if you want to gather statistics from natural videos or from natural images with controlled speed

How to get started?

For a general overview please take a look at the contents.m file, or (after you included it in th path) just look for help by typing the name of the folder, for instance: help BasicVideoTools_v2.

For additional details on how to use the functions in practice, see the demos:

  • demo_motion_programs, demo on how to use most functions (except random dots and newtonian sequences).
  • example_random_dots_sequence, demo on random dots sequences with controlled flow.
  • example_newtonian_sequence, demo on physics-controlled sequences.

\ No newline at end of file +

Basic Video Tools: A Matlab Toolbox for Video Data and Spatio-Temporal Vision Models (J. Malo, J. Gutiérrez and V. Laparra (c) Universitat de València 1996 - 2014)

What is in BasicVideoTools?

BasicVideoTools is a Matlab/Octave Toolbox intended to deal with video data and spatio-temporal vision models. In particular, it includes convenient *.m files to:

  • Read standard (VQEG and LIVE) video data
  • Rearrange video data (as for instance to perform statistical analysis)
  • Generate controlled sequences (controlled contrast, texture, and 2d and 3d speed)
  • Compute 3D Fourier transforms
  • Play with motion perception models (spatial texture and motion-sensitive cells of LGN, V1 and MT, and spatio-temporal CSF)
  • Visualize movies (achromatic only)

What is not in BasicVideoTools?

BasicVideoTools does not include:

  • Optical flow or motion estimation/compensation algorithms
  • Video Coding algorithms
  • Video Quality Measures

If you are looking for the above, please consider downloading other Toolboxes:

Download BasicVideoTools!

  • The code (version 1.0. Use this version only for compatibility with the code in the experiments of the motion-aftereffect paper).

  • The code (version 3.0 -Not only improved sampling functions and additional motion sensitive cells, but also more things)

  • Optional data (not necessary to run the code): If you use these data please cite the VQEG and LIVE databases (for video), and the CVC Barcelona Database (for images)

    • Image data (1.8 GB). Luminance images from the CVC Barcelona Calibrated Image Database.

    • Video data (2.6 GB): Raw videos from the VQEG and LIVE video databases.

Installation and Requirements

  • Download the BasicVideoTools file(s)
  • Decompress at your machine in the folder BasicVideoTools (no location restrictions for this folder)
  • Update the matlab/octave path including all subfolders
  • Tested on Matlab 2006b and posterior Matlab versions
  • Video and image data are only required if you want to gather statistics from natural videos or from natural images with controlled speed

How to get started?

For a general overview please take a look at the contents.m file, or (after you included it in the path) just look for help by typing the name of the folder, for instance: help BasicVideoTools_v2.

For additional details on how to use the functions in practice, see the demos:

  • demo_motion_programs, demo on how to use most functions (except random dots and newtonian sequences).
  • example_random_dots_sequence, demo on random dots sequences with controlled flow.
  • example_newtonian_sequence, demo on physics-controlled sequences.

\ No newline at end of file diff --git a/code/image_video_processing/basicvideotools/index.html b/code/image_video_processing/basicvideotools/index.html index c0a93096..902c024a 100644 --- a/code/image_video_processing/basicvideotools/index.html +++ b/code/image_video_processing/basicvideotools/index.html @@ -2,4 +2,4 @@

BasicVideoTools

\ No newline at end of file +

BasicVideoTools

\ No newline at end of file diff --git a/code/image_video_processing/basicvideotools/index.xml b/code/image_video_processing/basicvideotools/index.xml index f0269d0d..87763627 100644 --- a/code/image_video_processing/basicvideotools/index.xml +++ b/code/image_video_processing/basicvideotools/index.xml @@ -1 +1 @@ -BasicVideoTools on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/image_video_processing/basicvideotools/Recent content in BasicVideoTools on ISP - Image and Signal Processing groupHugoen-usBasic Video Tools: A Matlab Toolbox for Video Data and Spatio-Temporal Vision Models (J. Malo, J. Gutirrez and V. Laparra (c) Universitat de Valncia 1996 - 2014)https://ipl-uv.github.io/code/image_video_processing/basicvideotools/content/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/code/image_video_processing/basicvideotools/content/ \ No newline at end of file +BasicVideoTools on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/image_video_processing/basicvideotools/Recent content in BasicVideoTools on ISP - Image and Signal Processing groupHugoen-usBasic Video Tools: A Matlab Toolbox for Video Data and Spatio-Temporal Vision Models (J. Malo, J. Gutirrez and V. Laparra (c) Universitat de Valncia 1996 - 2014)https://isp.uv.es/github/code/image_video_processing/basicvideotools/content/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/code/image_video_processing/basicvideotools/content/ \ No newline at end of file diff --git a/code/image_video_processing/index.html b/code/image_video_processing/index.html index 98f14168..f0495989 100644 --- a/code/image_video_processing/index.html +++ b/code/image_video_processing/index.html @@ -2,4 +2,4 @@

Image and video processing

BasicVideoTools

BasicVideoTools Image

A Matlab Toolbox with convenient functions to handle video data. It includes routines to read VQEG and LIVE databases, generate synthetic sequences with controlled 2D and 3D speed, spatio-temporal Fourier transforms, perceptual sensors and filters (V1 and MT cells), and spatio-temporal CSFs.

References
  • Importance of quantiser design compared to optimal multigrid motion estimation in video coding. Malo, J., Ferri, F.J., Gutierrez, J., and Epifanio, I. Electronics Letters, 36(9):807-809, 2000.
  • Video quality measures based on the standard spatial observer. Watson, A.B., and Malo, J. ICIP, 2002.

SpatioSpectralTools

SpatioSpectralTools Image

SpatioSpectralTools is a Matlab Toolbox for reflectance and illuminant estimation that uses spatial information to simplify the (otherwise ill-conditioned) inverse problem. The proposed analysis is useful to derive the spatio-spectral resolution required to solve a retrieval problem.

References
  • The role of spatial information in disentangling the irradiance-reflectance-transmittance ambiguity. Jimenez, S., and Malo, J. IEEE Transactions on Geoscience and Remote Sensing, 52(8):4881-4894, 2014.

VideoCodingTools

VideoCodingTools Image

VideoCodingTools is a Matlab Toolbox for motion estimation/compensation and video compression. Optical flow computation is done with perceptually meaningful hierarchical block matching, and residual quantization is done according to non-linear Human Visual System models.

References
  • Importance of quantiser design compared to optimal multigrid motion estimation in video coding. Malo, J., Ferri, F.J., Gutierrez, J., and Epifanio, I. Electronics Letters, 36(9):807-809, 2000.
  • Video quality measures based on the standard spatial observer. Watson, A.B., and Malo, J. ICIP, 2002.

VideoQualityTools

VideoQualityTools Image

VideoQualityTools is a Matlab Toolbox for perceptual video quality assessment based on the Standard Spatial Observer model augmented with Divisive Normalization. It performed second-best in VQEG Phase-I using no ad-hoc hand-crafted features.

References
  • Importance of quantiser design compared to optimal multigrid motion estimation in video coding. Malo, J., Ferri, F.J., Gutierrez, J., and Epifanio, I. Electronics Letters, 36(9):807-809, 2000.
  • Video quality measures based on the standard spatial observer. Watson, A.B., and Malo, J. ICIP, 2002.

ViStaCoRe: Visual Statistics Coding and Restoration Toolbox

ViStaCoRe Image

The ViStaCoRe Coding Package is a Matlab Toolbox for achromatic and color image compression that includes a set of transform coding algorithms based on (1) Human Vision Models of different accuracy, and (2) coefficient selection through Sparse Regression in local frequency domains (in particular SVR). The ViStaCoRe Restoration Package is a Matlab Toolbox for image restoration that includes (1) classical regularization techniques, (2) classical wavelet thresholding techniques, (3) regularization functionals based on non-linear Human Vision models, and (4) denoising techniques based on Kernel regression in wavelet domains.

References
  • Image denoising with kernels based on natural image relations. Laparra, V., Gutiérrez, J., Camps-Valls, G., and Malo, J. Journal of Machine Learning Research, 11:873-903, 2010.
  • On the suitable domain for SVM training in image coding. Camps-Valls, G., Gutiérrez, J., Gómez-Pérez, G., and Malo, J. Journal of Machine Learning Research, 9:49-66, 2008.
  • Regularization operators for natural images based on nonlinear perception models. Gutiérrez, J., Ferri, F.J., and Malo, J. IEEE Transactions on Image Processing, 15(1):189-200, 2006.
  • Nonlinear image representation for efficient perceptual coding. Malo, J., Epifanio, I., Navarro, R., and Simoncelli, E.P. IEEE Transactions on Image Processing, 15(1):68-80, 2006.

VistaQualityTools

VistaQualityTools Image

VistaQualityTools is a Matlab Toolbox for full reference color (and also achromatic) image quality assessment based on divisive normalization Human Vision models in the DCT and the Wavelet domains.

References
  • Divisive normalization image quality metric revisited. Laparra, V., Muñoz-Marí, J., and Malo, J. Journal of the Optical Society of America A: Optics and Image Science, and Vision, 27(4):852-864, 2010.

\ No newline at end of file +

Image and video processing

BasicVideoTools

BasicVideoTools Image

A Matlab Toolbox with convenient functions to handle video data. It includes routines to read VQEG and LIVE databases, generate synthetic sequences with controlled 2D and 3D speed, spatio-temporal Fourier transforms, perceptual sensors and filters (V1 and MT cells), and spatio-temporal CSFs.

References
  • Importance of quantiser design compared to optimal multigrid motion estimation in video coding. Malo, J., Ferri, F.J., Gutierrez, J., and Epifanio, I. Electronics Letters, 36(9):807-809, 2000.
  • Video quality measures based on the standard spatial observer. Watson, A.B., and Malo, J. ICIP, 2002.

SpatioSpectralTools

SpatioSpectralTools Image

SpatioSpectralTools is a Matlab Toolbox for reflectance and illuminant estimation that uses spatial information to simplify the (otherwise ill-conditioned) inverse problem. The proposed analysis is useful to derive the spatio-spectral resolution required to solve a retrieval problem.

References
  • The role of spatial information in disentangling the irradiance-reflectance-transmittance ambiguity. Jimenez, S., and Malo, J. IEEE Transactions on Geoscience and Remote Sensing, 52(8):4881-4894, 2014.

VideoCodingTools

VideoCodingTools Image

VideoCodingTools is a Matlab Toolbox for motion estimation/compensation and video compression. Optical flow computation is done with perceptually meaningful hierarchical block matching, and residual quantization is done according to non-linear Human Visual System models.

References
  • Importance of quantiser design compared to optimal multigrid motion estimation in video coding. Malo, J., Ferri, F.J., Gutierrez, J., and Epifanio, I. Electronics Letters, 36(9):807-809, 2000.
  • Video quality measures based on the standard spatial observer. Watson, A.B., and Malo, J. ICIP, 2002.

VideoQualityTools

VideoQualityTools Image

VideoQualityTools is a Matlab Toolbox for perceptual video quality assessment based on the Standard Spatial Observer model augmented with Divisive Normalization. It performed second-best in VQEG Phase-I using no ad-hoc hand-crafted features.

References
  • Importance of quantiser design compared to optimal multigrid motion estimation in video coding. Malo, J., Ferri, F.J., Gutierrez, J., and Epifanio, I. Electronics Letters, 36(9):807-809, 2000.
  • Video quality measures based on the standard spatial observer. Watson, A.B., and Malo, J. ICIP, 2002.

ViStaCoRe: Visual Statistics Coding and Restoration Toolbox

ViStaCoRe Image

The ViStaCoRe Coding Package is a Matlab Toolbox for achromatic and color image compression that includes a set of transform coding algorithms based on (1) Human Vision Models of different accuracy, and (2) coefficient selection through Sparse Regression in local frequency domains (in particular SVR). The ViStaCoRe Restoration Package is a Matlab Toolbox for image restoration that includes (1) classical regularization techniques, (2) classical wavelet thresholding techniques, (3) regularization functionals based on non-linear Human Vision models, and (4) denoising techniques based on Kernel regression in wavelet domains.

References
  • Image denoising with kernels based on natural image relations. Laparra, V., Gutiérrez, J., Camps-Valls, G., and Malo, J. Journal of Machine Learning Research, 11:873-903, 2010.
  • On the suitable domain for SVM training in image coding. Camps-Valls, G., Gutiérrez, J., Gómez-Pérez, G., and Malo, J. Journal of Machine Learning Research, 9:49-66, 2008.
  • Regularization operators for natural images based on nonlinear perception models. Gutiérrez, J., Ferri, F.J., and Malo, J. IEEE Transactions on Image Processing, 15(1):189-200, 2006.
  • Nonlinear image representation for efficient perceptual coding. Malo, J., Epifanio, I., Navarro, R., and Simoncelli, E.P. IEEE Transactions on Image Processing, 15(1):68-80, 2006.

VistaQualityTools

VistaQualityTools Image

VistaQualityTools is a Matlab Toolbox for full reference color (and also achromatic) image quality assessment based on divisive normalization Human Vision models in the DCT and the Wavelet domains.

References
  • Divisive normalization image quality metric revisited. Laparra, V., Muñoz-Marí, J., and Malo, J. Journal of the Optical Society of America A: Optics and Image Science, and Vision, 27(4):852-864, 2010.

\ No newline at end of file diff --git a/code/image_video_processing/index.xml b/code/image_video_processing/index.xml index d69cb5c5..9156a73e 100644 --- a/code/image_video_processing/index.xml +++ b/code/image_video_processing/index.xml @@ -1 +1 @@ -Image and video processing on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/image_video_processing/Recent content in Image and video processing on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Image and video processing on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/image_video_processing/Recent content in Image and video processing on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/image_video_processing/spatiospectraltools/content/index.html b/code/image_video_processing/spatiospectraltools/content/index.html index 23c230c8..8081af2d 100644 --- a/code/image_video_processing/spatiospectraltools/content/index.html +++ b/code/image_video_processing/spatiospectraltools/content/index.html @@ -2,5 +2,5 @@

The role of spatial information in disentangling the irradiance-reflectance-transmitance ambiguity

In the satellite hyperspectral measures the contributions of light, surface, and atmosphere are mixed. Applications need separate access to the sources. Conventional inversion techniques usually take a pixel-wise, spectral-only approach. However, recent improvements in retrieving surface and atmosphere characteristics use heuristic spatial smoothness constraints.

In this paper we theoretically justify such heuristics by analyzing the impact of spatial information on the uncertainty of the solution. The proposed analysis allows to assess in advance the uniqueness (or robustness) of the solution depending on the curvature of a likelihood surface. In situations where pixel-based approaches become unreliable it turns out that the consideration of spatial information always makes the problem to be better conditioned. With the proposed analysis this is easily understood since the curvature is consistent with the complexity of the sources measured in terms of the number of significant eigenvalues (or free parameters in the problem). In agreement with recent results in hyperspectral image coding, spatial correlations in the sources imply that the intrinsic complexity of the spatio-spectral representation of the signal is always lower than its spectral-only counterpart. According to this, the number of free parameters in the spatio-spectral inverse problem is smaller so the spatio-spectral approaches are always better than spectral-only approaches.

Experiments using ensembles of actual reflectance values and realistic MODTRAN irradiance and atmosphere radiance and transmittance values show that the proposed analysis successfully predicts the practical difficulty of the problem and the improved quality of spatio-spectral retrieval.

Supplementary Material

  1. Extends the results in the manuscript to different spatial structures.
  2. Extends the results in the manuscript to different wavelength ranges and spatio-spectral resolutions.
  3. Statistically justifies the initialization scheme of sources.
  4. Provides sample data and code.

The generality of the conclusion is not surprising since the imaging equation and the PCA decompositions do not depend on the specific spatio-spectral resolution or wavelength range. The joint spatio-spectral approach will simplify the problem whenever there are relations between the signal at different spatial positions, which is true in a wide range of situations given the spatial continuity of the physical sources (the reflecting objects and the atmospheric phenomena).

1. Effect of the Spatial Structure

Original and estimated reflectance images for sites of different spatial complexity (urban, forest, and open fields) using spectral-only and spatio-spectral retrieval. In these cases, the spatial resolution and wavelength range were the same as in the manuscript. No additional training was necessary, only the application of the previous analysis on new test locations.

2. Effect of Wavelength Range and Spatio-Spectral Resolution

In this experiment, we used substantially different wavelength ranges and spatio-spectral resolutions from those in the manuscript.

3. Initialization of the Sources (Surface Reflectance, Atmosphere Radiance, and Transmittance)

The retrieval procedure used to check the accuracy of the theoretical predictions involves a series of search loops that require an initialization of the variables S, A, and T. Since we searched in the decorrelated PCA domains, each coefficient of these sources was independently initialized using a random value drawn from the empirical marginal PDFs (histograms) learned at the training stage. Below we show examples of the marginal PDFs for some AC coefficients of these sources. The strong peak at zero makes zero initialization reasonable as well.

Epsilon-Huber Support Vector Regression

\ No newline at end of file diff --git a/code/regression/epsilon/index.xml b/code/regression/epsilon/index.xml index a6792f5b..96d2d642 100644 --- a/code/regression/epsilon/index.xml +++ b/code/regression/epsilon/index.xml @@ -1 +1 @@ -Epsilon-Huber Support Vector Regression on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/epsilon/Recent content in Epsilon-Huber Support Vector Regression on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Epsilon-Huber Support Vector Regression on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/epsilon/Recent content in Epsilon-Huber Support Vector Regression on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/fair_kernel/index.html b/code/regression/fair_kernel/index.html index 2caf23c4..04c3556a 100644 --- a/code/regression/fair_kernel/index.html +++ b/code/regression/fair_kernel/index.html @@ -2,4 +2,4 @@

Fair Kernel Learning

\ No newline at end of file +

Fair Kernel Learning

\ No newline at end of file diff --git a/code/regression/fair_kernel/index.xml b/code/regression/fair_kernel/index.xml index 6565702d..b944eb83 100644 --- a/code/regression/fair_kernel/index.xml +++ b/code/regression/fair_kernel/index.xml @@ -1 +1 @@ -Fair Kernel Learning on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/fair_kernel/Recent content in Fair Kernel Learning on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Fair Kernel Learning on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/fair_kernel/Recent content in Fair Kernel Learning on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/gaussian_deep/content/index.html b/code/regression/gaussian_deep/content/index.html index d6f79b53..c4281760 100644 --- a/code/regression/gaussian_deep/content/index.html +++ b/code/regression/gaussian_deep/content/index.html @@ -2,4 +2,4 @@

Deep Gaussian Processes for Parameter Retrieval and Model Inversion

Parameter retrieval and model inversion are key problems in remote sensing and Earth observation. Currently, different approximations exist: a direct, yet costly, inversion of radiative transfer models (RTMs); the statistical inversion with {m in situ} data that often results in problems of extrapolation outside the study area; and the most widely adopted hybrid modeling by which statistical models, mostly nonlinear and non-parametric machine learning algorithms, are applied to invert RTM simulations. We will focus on the latter. Among the different existing algorithms, in the last decade, Gaussian Processes (GPs) have excelled in treating these problems because of the provided accuracy and confidence intervals. However, RTMs are very complex, highly nonlinear, and typically hierarchical models, so that very often a single (shallow) GP model cannot capture complex feature relations for inversion. This motivates the use of deeper hierarchical architectures, while still preserving the desirable properties of GPs. This paper introduces deep Gaussian Processes (DGPs) for bio-geo-physical model inversion. Unlike full GP models, the proposed DGP accounts for complicated (modular, hierarchical) processes, provides an efficient solution that scales well to big datasets, and improves prediction accuracy over its single layer counterpart. In the experimental section, we provide empirical evidence of performance for the estimation of surface temperature and dew point temperature from infrared sounding data, as well as for the predicting of chlorophyll content, inorganic suspended matter and coloured dissolved matter from multispectral data acquired by the Sentinel-3 OLCI sensor. The presented methodology allows for more expressive forms of GPs in big remote sensing model inversion problems.

References

  • Deep Gaussian Processes for Parameter Retrieval and Model Inversion
    Daniel H. Svendsen, Pablo M. Alvarez, Ana Belen Ruescas, Rafael Molina and Gustau Camps-Valls

Download

We provide the code and data to reproduce the velocity map for hurricane Isabel. Our experiment uses a single layer of the simulated hurricane - for access to the full data-set see earthsystemgrid webpage. We also provide the code for the Case2Extreme experiment without access to the code. This serves more as a guideline to applying the DGP to large datasets. The Double Stochastic DGP implementation can be found at this github page. Two notebooks are provided for illustration purposes and reproducibility:

\ No newline at end of file +

Deep Gaussian Processes for Parameter Retrieval and Model Inversion

Parameter retrieval and model inversion are key problems in remote sensing and Earth observation. Currently, different approximations exist: a direct, yet costly, inversion of radiative transfer models (RTMs); the statistical inversion with {m in situ} data that often results in problems of extrapolation outside the study area; and the most widely adopted hybrid modeling by which statistical models, mostly nonlinear and non-parametric machine learning algorithms, are applied to invert RTM simulations. We will focus on the latter. Among the different existing algorithms, in the last decade, Gaussian Processes (GPs) have excelled in treating these problems because of the provided accuracy and confidence intervals. However, RTMs are very complex, highly nonlinear, and typically hierarchical models, so that very often a single (shallow) GP model cannot capture complex feature relations for inversion. This motivates the use of deeper hierarchical architectures, while still preserving the desirable properties of GPs. This paper introduces deep Gaussian Processes (DGPs) for bio-geo-physical model inversion. Unlike full GP models, the proposed DGP accounts for complicated (modular, hierarchical) processes, provides an efficient solution that scales well to big datasets, and improves prediction accuracy over its single layer counterpart. In the experimental section, we provide empirical evidence of performance for the estimation of surface temperature and dew point temperature from infrared sounding data, as well as for the predicting of chlorophyll content, inorganic suspended matter and coloured dissolved matter from multispectral data acquired by the Sentinel-3 OLCI sensor. The presented methodology allows for more expressive forms of GPs in big remote sensing model inversion problems.

References

  • Deep Gaussian Processes for Parameter Retrieval and Model Inversion
    Daniel H. Svendsen, Pablo M. Alvarez, Ana Belen Ruescas, Rafael Molina and Gustau Camps-Valls

Download

We provide the code and data to reproduce the velocity map for hurricane Isabel. Our experiment uses a single layer of the simulated hurricane - for access to the full data-set see earthsystemgrid webpage. We also provide the code for the Case2Extreme experiment without access to the code. This serves more as a guideline to applying the DGP to large datasets. The Double Stochastic DGP implementation can be found at this github page. Two notebooks are provided for illustration purposes and reproducibility:

\ No newline at end of file diff --git a/code/regression/gaussian_deep/index.html b/code/regression/gaussian_deep/index.html index 5ba03c6e..688d921e 100644 --- a/code/regression/gaussian_deep/index.html +++ b/code/regression/gaussian_deep/index.html @@ -2,4 +2,4 @@

Deep Gaussian processes for biophysical parameter estimation

\ No newline at end of file +

Deep Gaussian processes for biophysical parameter estimation

\ No newline at end of file diff --git a/code/regression/gaussian_deep/index.xml b/code/regression/gaussian_deep/index.xml index 95bb4343..369d4c8e 100644 --- a/code/regression/gaussian_deep/index.xml +++ b/code/regression/gaussian_deep/index.xml @@ -1 +1 @@ -Deep Gaussian processes for biophysical parameter estimation on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/gaussian_deep/Recent content in Deep Gaussian processes for biophysical parameter estimation on ISP - Image and Signal Processing groupHugoen-usDeep Gaussian Processes for Parameter Retrieval and Model Inversionhttps://ipl-uv.github.io/code/regression/gaussian_deep/content/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/code/regression/gaussian_deep/content/ \ No newline at end of file +Deep Gaussian processes for biophysical parameter estimation on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/gaussian_deep/Recent content in Deep Gaussian processes for biophysical parameter estimation on ISP - Image and Signal Processing groupHugoen-usDeep Gaussian Processes for Parameter Retrieval and Model Inversionhttps://isp.uv.es/github/code/regression/gaussian_deep/content/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/code/regression/gaussian_deep/content/ \ No newline at end of file diff --git a/code/regression/gaussian_joint/index.html b/code/regression/gaussian_joint/index.html index f801a3f0..e9d85236 100644 --- a/code/regression/gaussian_joint/index.html +++ b/code/regression/gaussian_joint/index.html @@ -2,4 +2,4 @@

Joint Gaussian processes

\ No newline at end of file +

Joint Gaussian processes

\ No newline at end of file diff --git a/code/regression/gaussian_joint/index.xml b/code/regression/gaussian_joint/index.xml index 3fce0d17..4af0537a 100644 --- a/code/regression/gaussian_joint/index.xml +++ b/code/regression/gaussian_joint/index.xml @@ -1 +1 @@ -Joint Gaussian processes on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/gaussian_joint/Recent content in Joint Gaussian processes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Joint Gaussian processes on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/gaussian_joint/Recent content in Joint Gaussian processes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/gaussian_noise/index.html b/code/regression/gaussian_noise/index.html index 3b6d3137..0e348b3b 100644 --- a/code/regression/gaussian_noise/index.html +++ b/code/regression/gaussian_noise/index.html @@ -2,4 +2,4 @@

Gaussian processes with input noise

\ No newline at end of file +

Gaussian processes with input noise

\ No newline at end of file diff --git a/code/regression/gaussian_noise/index.xml b/code/regression/gaussian_noise/index.xml index 958c942e..688de1e7 100644 --- a/code/regression/gaussian_noise/index.xml +++ b/code/regression/gaussian_noise/index.xml @@ -1 +1 @@ -Gaussian processes with input noise on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/gaussian_noise/Recent content in Gaussian processes with input noise on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Gaussian processes with input noise on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/gaussian_noise/Recent content in Gaussian processes with input noise on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/gaussian_repository/index.html b/code/regression/gaussian_repository/index.html index a2d0b304..245c17bc 100644 --- a/code/regression/gaussian_repository/index.html +++ b/code/regression/gaussian_repository/index.html @@ -2,4 +2,4 @@

A comprehensive Gaussian processes repository

\ No newline at end of file +

A comprehensive Gaussian processes repository

\ No newline at end of file diff --git a/code/regression/gaussian_repository/index.xml b/code/regression/gaussian_repository/index.xml index 838ac4bf..32df416c 100644 --- a/code/regression/gaussian_repository/index.xml +++ b/code/regression/gaussian_repository/index.xml @@ -1 +1 @@ -A comprehensive Gaussian processes repository on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/gaussian_repository/Recent content in A comprehensive Gaussian processes repository on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +A comprehensive Gaussian processes repository on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/gaussian_repository/Recent content in A comprehensive Gaussian processes repository on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/index.html b/code/regression/index.html index 9f1b13e9..af112d32 100644 --- a/code/regression/index.html +++ b/code/regression/index.html @@ -2,4 +2,4 @@

Regression, system identification and function approximation

A comprehensive Gaussian processes repository

Gaussian Processes Image

A comprehensive repo on Gaussian processes code, literature, and model zoo.

References
  • A comprehensive Gaussian processes repository. J. E. Johnson, Tech Rep 2019/12B, Universitat de Valencia, 2019.

ARX-RVM: Autorregressive eXogenous Relevance Vector Machine

ARX-RVM Image

Nonlinear system identification using Relevance Vector Machines (RVM). The ARX-RVM yields confidence intervals for the predictions and provides a better trade-off between accuracy and sparsity.

References
  • Nonlinear system identification with composite relevance vector machines. Camps-Valls, G., Martínez-Ramón, M., Rojo-Álvarez, J.L., Muñoz-Marí, J. IEEE Signal Processing Letters, 14(4):279-282, 2007.

Deep Gaussian processes for biophysical parameter estimation

Deep Gaussian Processes Image

Deep Gaussian Processes (DGPs) for bio-geo-physical model inversion. Unlike shallow GP models, DGPs account for complicated hierarchical processes, provide an efficient solution that scales well to big datasets, and improve prediction accuracy over single-layer models.

References
  • Deep Gaussian Processes for Parameter Retrieval and Model Inversion. Daniel H. Svendsen, Pablo M. Alvarez, Ana Belen Ruescas, Rafael Molina, and Gustau Camps-Valls, Submitted, 2020.
  • Svendsen, Daniel H. et al. Deep Gaussian Processes for Geophysical Parameter Retrieval. IGARSS 2018, IEEE, 2018.

Epsilon-Huber Support Vector Regression

Epsilon-Huber Support Vector Regression Image

The combination of Vapnik’s e-insensitive loss function and the Huber cost function enhances performance in the presence of different noise sources. Applied to system identification, gamma-filtering, and SVR.

References
  • A unified SVM framework for signal estimation. Rojo-Álvarez, J.L., Martínez-Ramón, M., Muñoz-Marí, J., Camps-Valls, G. Digital Signal Processing: A Review Journal, 26(1):1-20, 2014.

Fair Kernel Learning

Fair Kernel Learning Image

Fair Kernel Learning methods for regression and dimensionality reduction built on a previously proposed fair classification framework. The methods rely on the Hilbert-Schmidt independence criterion as the fairness term, which simplifies the problem and allows the inclusion of multiple sensitive variables simultaneously.

References
  • Fair Kernel Learning. Adrián Pérez-Suay, Valero Laparra, Gonzalo Mateo-García, Jordi Muñoz-Marí, Luis Gómez-Chova, and Gustau Camps-Valls. ECML PKDD 2017 (Accepted).

Gaussian processes with input noise

Gaussian Processes with Input Noise Image

Gaussian processes with input noise.

References
  • Accounting for Input Noise in Gaussian Process Parameter Retrieval. Johnson, J. E., Laparra, V., Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 17(3):391-395, 2020.

Joint Gaussian processes

Joint Gaussian Processes Image

A nonlinear nonparametric regression model which combines knowledge from real observations and simulated data from physical models. The Joint Gaussian Process (JGP) automatically detects the relative quality of the simulated and real data and combines them accordingly.

References
  • Svendsen, D. H., Martino, L., Campos-Taberner, M., García-Haro, F. J., & Camps-Valls, G. (2017). Joint Gaussian processes for biophysical parameter retrieval. IEEE Transactions on Geoscience and Remote Sensing, 56(3):1718-1727.
  • Bonilla, Edwin V., Kian M. Chai, and Christopher Williams. Multi-task Gaussian process prediction. Advances in Neural Information Processing Systems, 2008.

KARMA: Kernel AutoRegressive Moving Average with the Support Vector Machine

KARMA Image

Nonlinear system identification using the Kernel ARMA model with Support Vector Machines (SVM). Explicitly considers an ARMA model in RKHS, resulting in improved accuracy in system identification tasks.

References
  • Support vector machines for nonlinear Kernel ARMA system identification. Martínez-Ramón, M., Rojo-Álvarez, J.L., Camps-Valls, G., Muñoz-Marí, J. IEEE Transactions on Neural Networks, 17(6):1617-1622, 2006.

KSNR: Kernel Signal to Noise Ratio

KSNR Image

The Kernel Signal to Noise Ratio (KSNR) model maximizes signal variance while minimizing noise variance in RKHS. It is especially useful for handling correlated and non-Gaussian noise.

References
  • Learning with the kernel signal to noise ratio. Gomez-Chova, L., Camps-Valls, G. IEEE International Workshop on Machine Learning for Signal Processing, MLSP, 2012.

MSVR: Multioutput Support Vector Regression

MSVR Image

M-SVR extends the single-output SVR by considering nonlinear relations between features and among the output variables, which are typically inter-dependent.

References
  • Multioutput support vector regression for remote sensing biophysical parameter estimation. Tuia, D., Verrelst, J., Alonso, L., Perez-Cruz, F., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 8(4):804-808, 2011.

simpleR v2.1: simple Regression toolbox

simpleR Image

The simple Regression toolbox, simpleR, contains a set of functions in Matlab to illustrate the capabilities of several statistical regression algorithms. simpleR contains simple educational code for linear regression (LR), decision trees (TREE), neural networks (NN), support vector regression (SVR), kernel ridge regression (KRR), Gaussian Process Regression (GPR), and Variational Heteroscedastic Gaussian Process Regression (VHGPR). A dataset of spectra and associated chlorophyll content is included to illustrate training/testing procedures.

References
  • Retrieval of biophysical parameters with heteroscedastic Gaussian processes. Lázaro-Gredilla, M., Titsias, M.K., Verrelst, J., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 11(4):838-842, 2014.
  • Prediction of daily global solar irradiation using temporal Gaussian processes. Salcedo-Sanz, S., Casanova-Mateo, C., Muñoz-Marí, J., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 11(11):1936-1940, 2014.

SS-SVR: Semi-supervised Support Vector Regression

SS-SVR Image

Semi-supervised SVR algorithms tested in multiplatform LAI estimation and oceanic chlorophyll concentration prediction, showing good generalization capabilities when few labeled samples are available.

References
  • Biophysical parameter estimation with a semisupervised support vector machine. Camps-Valls, G., Munoz-Marí, J., Gómez-Chova, L., Richter, K., and Calpe-Maravilla, J. IEEE Geoscience and Remote Sensing Letters, 6(2):248-252, 2009.

\ No newline at end of file +

Regression, system identification and function approximation

A comprehensive Gaussian processes repository

Gaussian Processes Image

A comprehensive repo on Gaussian processes code, literature, and model zoo.

References
  • A comprehensive Gaussian processes repository. J. E. Johnson, Tech Rep 2019/12B, Universitat de Valencia, 2019.

ARX-RVM: Autorregressive eXogenous Relevance Vector Machine

ARX-RVM Image

Nonlinear system identification using Relevance Vector Machines (RVM). The ARX-RVM yields confidence intervals for the predictions and provides a better trade-off between accuracy and sparsity.

References
  • Nonlinear system identification with composite relevance vector machines. Camps-Valls, G., Martínez-Ramón, M., Rojo-Álvarez, J.L., Muñoz-Marí, J. IEEE Signal Processing Letters, 14(4):279-282, 2007.

Deep Gaussian processes for biophysical parameter estimation

Deep Gaussian Processes Image

Deep Gaussian Processes (DGPs) for bio-geo-physical model inversion. Unlike shallow GP models, DGPs account for complicated hierarchical processes, provide an efficient solution that scales well to big datasets, and improve prediction accuracy over single-layer models.

References
  • Deep Gaussian Processes for Parameter Retrieval and Model Inversion. Daniel H. Svendsen, Pablo M. Alvarez, Ana Belen Ruescas, Rafael Molina, and Gustau Camps-Valls, Submitted, 2020.
  • Svendsen, Daniel H. et al. Deep Gaussian Processes for Geophysical Parameter Retrieval. IGARSS 2018, IEEE, 2018.

Epsilon-Huber Support Vector Regression

Epsilon-Huber Support Vector Regression Image

The combination of Vapnik’s e-insensitive loss function and the Huber cost function enhances performance in the presence of different noise sources. Applied to system identification, gamma-filtering, and SVR.

References
  • A unified SVM framework for signal estimation. Rojo-Álvarez, J.L., Martínez-Ramón, M., Muñoz-Marí, J., Camps-Valls, G. Digital Signal Processing: A Review Journal, 26(1):1-20, 2014.

Fair Kernel Learning

Fair Kernel Learning Image

Fair Kernel Learning methods for regression and dimensionality reduction built on a previously proposed fair classification framework. The methods rely on the Hilbert-Schmidt independence criterion as the fairness term, which simplifies the problem and allows the inclusion of multiple sensitive variables simultaneously.

References
  • Fair Kernel Learning. Adrián Pérez-Suay, Valero Laparra, Gonzalo Mateo-García, Jordi Muñoz-Marí, Luis Gómez-Chova, and Gustau Camps-Valls. ECML PKDD 2017 (Accepted).

Gaussian processes with input noise

Gaussian Processes with Input Noise Image

Gaussian processes with input noise.

References
  • Accounting for Input Noise in Gaussian Process Parameter Retrieval. Johnson, J. E., Laparra, V., Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 17(3):391-395, 2020.

Joint Gaussian processes

Joint Gaussian Processes Image

A nonlinear nonparametric regression model which combines knowledge from real observations and simulated data from physical models. The Joint Gaussian Process (JGP) automatically detects the relative quality of the simulated and real data and combines them accordingly.

References
  • Svendsen, D. H., Martino, L., Campos-Taberner, M., García-Haro, F. J., & Camps-Valls, G. (2017). Joint Gaussian processes for biophysical parameter retrieval. IEEE Transactions on Geoscience and Remote Sensing, 56(3):1718-1727.
  • Bonilla, Edwin V., Kian M. Chai, and Christopher Williams. Multi-task Gaussian process prediction. Advances in Neural Information Processing Systems, 2008.

KARMA: Kernel AutoRegressive Moving Average with the Support Vector Machine

KARMA Image

Nonlinear system identification using the Kernel ARMA model with Support Vector Machines (SVM). Explicitly considers an ARMA model in RKHS, resulting in improved accuracy in system identification tasks.

References
  • Support vector machines for nonlinear Kernel ARMA system identification. Martínez-Ramón, M., Rojo-Álvarez, J.L., Camps-Valls, G., Muñoz-Marí, J. IEEE Transactions on Neural Networks, 17(6):1617-1622, 2006.

KSNR: Kernel Signal to Noise Ratio

KSNR Image

The Kernel Signal to Noise Ratio (KSNR) model maximizes signal variance while minimizing noise variance in RKHS. It is especially useful for handling correlated and non-Gaussian noise.

References
  • Learning with the kernel signal to noise ratio. Gomez-Chova, L., Camps-Valls, G. IEEE International Workshop on Machine Learning for Signal Processing, MLSP, 2012.

MSVR: Multioutput Support Vector Regression

MSVR Image

M-SVR extends the single-output SVR by considering nonlinear relations between features and among the output variables, which are typically inter-dependent.

References
  • Multioutput support vector regression for remote sensing biophysical parameter estimation. Tuia, D., Verrelst, J., Alonso, L., Perez-Cruz, F., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 8(4):804-808, 2011.

simpleR v2.1: simple Regression toolbox

simpleR Image

The simple Regression toolbox, simpleR, contains a set of functions in Matlab to illustrate the capabilities of several statistical regression algorithms. simpleR contains simple educational code for linear regression (LR), decision trees (TREE), neural networks (NN), support vector regression (SVR), kernel ridge regression (KRR), Gaussian Process Regression (GPR), and Variational Heteroscedastic Gaussian Process Regression (VHGPR). A dataset of spectra and associated chlorophyll content is included to illustrate training/testing procedures.

References
  • Retrieval of biophysical parameters with heteroscedastic Gaussian processes. Lázaro-Gredilla, M., Titsias, M.K., Verrelst, J., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 11(4):838-842, 2014.
  • Prediction of daily global solar irradiation using temporal Gaussian processes. Salcedo-Sanz, S., Casanova-Mateo, C., Muñoz-Marí, J., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 11(11):1936-1940, 2014.

SS-SVR: Semi-supervised Support Vector Regression

SS-SVR Image

Semi-supervised SVR algorithms tested in multiplatform LAI estimation and oceanic chlorophyll concentration prediction, showing good generalization capabilities when few labeled samples are available.

References
  • Biophysical parameter estimation with a semisupervised support vector machine. Camps-Valls, G., Munoz-Marí, J., Gómez-Chova, L., Richter, K., and Calpe-Maravilla, J. IEEE Geoscience and Remote Sensing Letters, 6(2):248-252, 2009.

\ No newline at end of file diff --git a/code/regression/index.xml b/code/regression/index.xml index fcb51041..982b6482 100644 --- a/code/regression/index.xml +++ b/code/regression/index.xml @@ -1 +1 @@ -Regression, system identification and function approximation on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/Recent content in Regression, system identification and function approximation on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Regression, system identification and function approximation on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/Recent content in Regression, system identification and function approximation on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/karma/index.html b/code/regression/karma/index.html index 292d31d5..4adb0245 100644 --- a/code/regression/karma/index.html +++ b/code/regression/karma/index.html @@ -2,4 +2,4 @@

KARMA: Kernel AutoRegressive Moving Average with the Support Vector Machine

\ No newline at end of file +

KARMA: Kernel AutoRegressive Moving Average with the Support Vector Machine

\ No newline at end of file diff --git a/code/regression/karma/index.xml b/code/regression/karma/index.xml index 826dd783..d8919691 100644 --- a/code/regression/karma/index.xml +++ b/code/regression/karma/index.xml @@ -1 +1 @@ -KARMA: Kernel AutoRegressive Moving Average with the Support Vector Machine on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/karma/Recent content in KARMA: Kernel AutoRegressive Moving Average with the Support Vector Machine on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +KARMA: Kernel AutoRegressive Moving Average with the Support Vector Machine on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/karma/Recent content in KARMA: Kernel AutoRegressive Moving Average with the Support Vector Machine on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/ksnr/index.html b/code/regression/ksnr/index.html index 32a216d7..85dcbf6b 100644 --- a/code/regression/ksnr/index.html +++ b/code/regression/ksnr/index.html @@ -2,4 +2,4 @@

KSNR: Kernel Signal to Noise Ratio

\ No newline at end of file +

KSNR: Kernel Signal to Noise Ratio

\ No newline at end of file diff --git a/code/regression/ksnr/index.xml b/code/regression/ksnr/index.xml index 9a48f2a4..2b043482 100644 --- a/code/regression/ksnr/index.xml +++ b/code/regression/ksnr/index.xml @@ -1 +1 @@ -KSNR: Kernel Signal to Noise Ratio on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/ksnr/Recent content in KSNR: Kernel Signal to Noise Ratio on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +KSNR: Kernel Signal to Noise Ratio on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/ksnr/Recent content in KSNR: Kernel Signal to Noise Ratio on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/msvr/index.html b/code/regression/msvr/index.html index b41e6165..6e292d9e 100644 --- a/code/regression/msvr/index.html +++ b/code/regression/msvr/index.html @@ -2,4 +2,4 @@

MSVR: Multioutput Support Vector Regression

\ No newline at end of file +

MSVR: Multioutput Support Vector Regression

\ No newline at end of file diff --git a/code/regression/msvr/index.xml b/code/regression/msvr/index.xml index 2f7a3b70..c5d2dc5f 100644 --- a/code/regression/msvr/index.xml +++ b/code/regression/msvr/index.xml @@ -1 +1 @@ -MSVR: Multioutput Support Vector Regression on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/msvr/Recent content in MSVR: Multioutput Support Vector Regression on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +MSVR: Multioutput Support Vector Regression on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/msvr/Recent content in MSVR: Multioutput Support Vector Regression on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/simpler/index.html b/code/regression/simpler/index.html index 8288954c..d89b84cd 100644 --- a/code/regression/simpler/index.html +++ b/code/regression/simpler/index.html @@ -2,4 +2,4 @@

simpleR v2.1: simple Regression toolbox

\ No newline at end of file +

simpleR v2.1: simple Regression toolbox

\ No newline at end of file diff --git a/code/regression/simpler/index.xml b/code/regression/simpler/index.xml index ade887ba..6fcce513 100644 --- a/code/regression/simpler/index.xml +++ b/code/regression/simpler/index.xml @@ -1 +1 @@ -simpleR v2.1: simple Regression toolbox on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/simpler/Recent content in simpleR v2.1: simple Regression toolbox on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +simpleR v2.1: simple Regression toolbox on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/simpler/Recent content in simpleR v2.1: simple Regression toolbox on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/regression/ss_svr/index.html b/code/regression/ss_svr/index.html index 4a91c32a..48f061b4 100644 --- a/code/regression/ss_svr/index.html +++ b/code/regression/ss_svr/index.html @@ -2,4 +2,4 @@

SS-SVR: Semi-supervised Support Vector Regression

\ No newline at end of file +

SS-SVR: Semi-supervised Support Vector Regression

\ No newline at end of file diff --git a/code/regression/ss_svr/index.xml b/code/regression/ss_svr/index.xml index ca08a9df..b710f402 100644 --- a/code/regression/ss_svr/index.xml +++ b/code/regression/ss_svr/index.xml @@ -1 +1 @@ -SS-SVR: Semi-supervised Support Vector Regression on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/regression/ss_svr/Recent content in SS-SVR: Semi-supervised Support Vector Regression on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +SS-SVR: Semi-supervised Support Vector Regression on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/regression/ss_svr/Recent content in SS-SVR: Semi-supervised Support Vector Regression on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/remote_sensing_aplications/altb/index.html b/code/remote_sensing_aplications/altb/index.html index 3678a089..9bc3a45a 100644 --- a/code/remote_sensing_aplications/altb/index.html +++ b/code/remote_sensing_aplications/altb/index.html @@ -2,4 +2,4 @@

ALTB: Active Learning MATLAB(tm) Toolbox

\ No newline at end of file +

ALTB: Active Learning MATLAB(tm) Toolbox

\ No newline at end of file diff --git a/code/remote_sensing_aplications/altb/index.xml b/code/remote_sensing_aplications/altb/index.xml index b5a683e4..23b40451 100644 --- a/code/remote_sensing_aplications/altb/index.xml +++ b/code/remote_sensing_aplications/altb/index.xml @@ -1 +1 @@ -ALTB: Active Learning MATLAB(tm) Toolbox on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/remote_sensing_aplications/altb/Recent content in ALTB: Active Learning MATLAB(tm) Toolbox on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +ALTB: Active Learning MATLAB(tm) Toolbox on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/remote_sensing_aplications/altb/Recent content in ALTB: Active Learning MATLAB(tm) Toolbox on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/remote_sensing_aplications/index.html b/code/remote_sensing_aplications/index.html index 8c1d710d..0cb48186 100644 --- a/code/remote_sensing_aplications/index.html +++ b/code/remote_sensing_aplications/index.html @@ -2,4 +2,4 @@

Remote sensing applications

ALTB: Active Learning MATLAB(tm) Toolbox

Active Learning Toolbox Image

ALTB is a set of tools implementing state-of-the-art active learning algorithms for remote sensing applications.

References
  • Semisupervised classification of remote sensing images with active queries. Munoz-Mari, J., Tuia, D., and Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing 50(10): 3751-3763, 2012.
  • Remote sensing image segmentation by active queries. Tuia, D., Muñoz-Marí, J., Camps-Valls, G. Pattern Recognition 45(6): 2180-2192, 2012.

Kernel Vegetation Indices

Kernel Vegetation Indices Image

Empirical vegetation indices derived from spectral reflectance data are widely used in remote sensing of the biosphere. Here we generalize the family of commonly used vegetation indices by exploiting all higher-order relations between spectral channels. This results in higher sensitivity to vegetation biophysical and physiological parameters, improving the monitoring of key parameters such as leaf area index and gross primary productivity.

References

MERIS/AATSR Synergy Cloud Screening

Cloud Screening Image

A module for the BEAM platform that provides cloud screening within the MERIS/AATSR Synergy Toolbox. This toolbox offers processing schemes for improved cloud screening, aerosol retrieval, and land atmospheric correction using combined multi-spectral and multi-angle information from MERIS and AATSR measurements.

Randomized Kernels for Large Scale Earth Observation Applications

Randomized Kernels Image

Kernel methods are powerful machine learning algorithms, widely used in remote sensing and geosciences. This paper introduces an efficient kernel method for fast statistical retrieval of atmospheric and biophysical parameters. The method approximates a kernel matrix with projections on random bases sampled from the Fourier domain.

SIMFEAT: A simple MATLAB(tm) toolbox of linear and kernel feature extraction

SIMFEAT Toolbox Image

Toolbox for linear and kernel feature extraction, including PCA, MNF, CCA, PLS, OPLS, and kernel methods like KPCA, KMNF, KCCA, KPLS, KOPLS, and KECA.

References
  • Kernel multivariate analysis framework for supervised subspace learning: A tutorial on linear and kernel multivariate methods. Arenas-Garcia et al., IEEE Signal Processing Magazine, 30(4):16-29, 2013.

simpleClass: Simple Classification Toolbox

simpleClass Toolbox Image

A set of train-test simple educational functions for data classification, including methods like LDA, QDA, decision trees, random forests, SVM, Boosting, and Gaussian process classifiers.

simpleR v2.1: simple Regression toolbox

simpleR Image

The simple Regression toolbox, simpleR, contains a set of functions in Matlab to illustrate the capabilities of several statistical regression algorithms. simpleR contains simple educational code for linear regression (LR), decision trees (TREE), neural networks (NN), support vector regression (SVR), kernel ridge regression (KRR), Gaussian Process Regression (GPR), and Variational Heteroscedastic Gaussian Process Regression (VHGPR). A dataset of spectra and associated chlorophyll content is included to illustrate training/testing procedures.

References
  • Retrieval of biophysical parameters with heteroscedastic Gaussian processes. Lázaro-Gredilla, M., Titsias, M.K., Verrelst, J., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 11(4):838-842, 2014.
  • Prediction of daily global solar irradiation using temporal Gaussian processes. Salcedo-Sanz, S., Casanova-Mateo, C., Muñoz-Marí, J., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 11(11):1936-1940, 2014.

simpleUnmix: simple Unmixing and Abundance estimation toolbox

simpleUnmix Toolbox Image

The simple Unmixing toolbox contains a set of Matlab functions for spectral unmixing, including endmember determination methods, spectral unmixing, and abundance estimation.

References
  • Remote Sensing Image Processing. Camps-Valls, G. et al., Synthesis Lectures on Image, Video, and Multimedia Processing, Morgan & Claypool Publishers, 2011.

\ No newline at end of file +

Remote sensing applications

ALTB: Active Learning MATLAB(tm) Toolbox

Active Learning Toolbox Image

ALTB is a set of tools implementing state-of-the-art active learning algorithms for remote sensing applications.

References
  • Semisupervised classification of remote sensing images with active queries. Munoz-Mari, J., Tuia, D., and Camps-Valls, G. IEEE Transactions on Geoscience and Remote Sensing 50(10): 3751-3763, 2012.
  • Remote sensing image segmentation by active queries. Tuia, D., Muñoz-Marí, J., Camps-Valls, G. Pattern Recognition 45(6): 2180-2192, 2012.

Kernel Vegetation Indices

Kernel Vegetation Indices Image

Empirical vegetation indices derived from spectral reflectance data are widely used in remote sensing of the biosphere. Here we generalize the family of commonly used vegetation indices by exploiting all higher-order relations between spectral channels. This results in higher sensitivity to vegetation biophysical and physiological parameters, improving the monitoring of key parameters such as leaf area index and gross primary productivity.

References

MERIS/AATSR Synergy Cloud Screening

Cloud Screening Image

A module for the BEAM platform that provides cloud screening within the MERIS/AATSR Synergy Toolbox. This toolbox offers processing schemes for improved cloud screening, aerosol retrieval, and land atmospheric correction using combined multi-spectral and multi-angle information from MERIS and AATSR measurements.

Randomized Kernels for Large Scale Earth Observation Applications

Randomized Kernels Image

Kernel methods are powerful machine learning algorithms, widely used in remote sensing and geosciences. This paper introduces an efficient kernel method for fast statistical retrieval of atmospheric and biophysical parameters. The method approximates a kernel matrix with projections on random bases sampled from the Fourier domain.

SIMFEAT: A simple MATLAB(tm) toolbox of linear and kernel feature extraction

SIMFEAT Toolbox Image

Toolbox for linear and kernel feature extraction, including PCA, MNF, CCA, PLS, OPLS, and kernel methods like KPCA, KMNF, KCCA, KPLS, KOPLS, and KECA.

References
  • Kernel multivariate analysis framework for supervised subspace learning: A tutorial on linear and kernel multivariate methods. Arenas-Garcia et al., IEEE Signal Processing Magazine, 30(4):16-29, 2013.

simpleClass: Simple Classification Toolbox

simpleClass Toolbox Image

A set of train-test simple educational functions for data classification, including methods like LDA, QDA, decision trees, random forests, SVM, Boosting, and Gaussian process classifiers.

simpleR v2.1: simple Regression toolbox

simpleR Image

The simple Regression toolbox, simpleR, contains a set of functions in Matlab to illustrate the capabilities of several statistical regression algorithms. simpleR contains simple educational code for linear regression (LR), decision trees (TREE), neural networks (NN), support vector regression (SVR), kernel ridge regression (KRR), Gaussian Process Regression (GPR), and Variational Heteroscedastic Gaussian Process Regression (VHGPR). A dataset of spectra and associated chlorophyll content is included to illustrate training/testing procedures.

References
  • Retrieval of biophysical parameters with heteroscedastic Gaussian processes. Lázaro-Gredilla, M., Titsias, M.K., Verrelst, J., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 11(4):838-842, 2014.
  • Prediction of daily global solar irradiation using temporal Gaussian processes. Salcedo-Sanz, S., Casanova-Mateo, C., Muñoz-Marí, J., and Camps-Valls, G. IEEE Geoscience and Remote Sensing Letters, 11(11):1936-1940, 2014.

simpleUnmix: simple Unmixing and Abundance estimation toolbox

simpleUnmix Toolbox Image

The simple Unmixing toolbox contains a set of Matlab functions for spectral unmixing, including endmember determination methods, spectral unmixing, and abundance estimation.

References
  • Remote Sensing Image Processing. Camps-Valls, G. et al., Synthesis Lectures on Image, Video, and Multimedia Processing, Morgan & Claypool Publishers, 2011.

\ No newline at end of file diff --git a/code/remote_sensing_aplications/index.xml b/code/remote_sensing_aplications/index.xml index d8e0dbc7..a7c52770 100644 --- a/code/remote_sensing_aplications/index.xml +++ b/code/remote_sensing_aplications/index.xml @@ -1 +1 @@ -Remote sensing applications on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/remote_sensing_aplications/Recent content in Remote sensing applications on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Remote sensing applications on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/remote_sensing_aplications/Recent content in Remote sensing applications on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/remote_sensing_aplications/kernel_vegetations_indices/index.html b/code/remote_sensing_aplications/kernel_vegetations_indices/index.html index 2f588f6c..f34c4718 100644 --- a/code/remote_sensing_aplications/kernel_vegetations_indices/index.html +++ b/code/remote_sensing_aplications/kernel_vegetations_indices/index.html @@ -2,4 +2,4 @@

Kernel Vegetation Indices

\ No newline at end of file +

Kernel Vegetation Indices

\ No newline at end of file diff --git a/code/remote_sensing_aplications/kernel_vegetations_indices/index.xml b/code/remote_sensing_aplications/kernel_vegetations_indices/index.xml index 1cfad165..5905af75 100644 --- a/code/remote_sensing_aplications/kernel_vegetations_indices/index.xml +++ b/code/remote_sensing_aplications/kernel_vegetations_indices/index.xml @@ -1 +1 @@ -Kernel Vegetation Indices on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/remote_sensing_aplications/kernel_vegetations_indices/Recent content in Kernel Vegetation Indices on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Kernel Vegetation Indices on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/remote_sensing_aplications/kernel_vegetations_indices/Recent content in Kernel Vegetation Indices on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/remote_sensing_aplications/meris_aatsr/index.html b/code/remote_sensing_aplications/meris_aatsr/index.html index 4fa1af93..650b656c 100644 --- a/code/remote_sensing_aplications/meris_aatsr/index.html +++ b/code/remote_sensing_aplications/meris_aatsr/index.html @@ -2,4 +2,4 @@

MERIS/AATSR Synergy Cloud Screening

\ No newline at end of file +

MERIS/AATSR Synergy Cloud Screening

\ No newline at end of file diff --git a/code/remote_sensing_aplications/meris_aatsr/index.xml b/code/remote_sensing_aplications/meris_aatsr/index.xml index 60bca648..6c672866 100644 --- a/code/remote_sensing_aplications/meris_aatsr/index.xml +++ b/code/remote_sensing_aplications/meris_aatsr/index.xml @@ -1 +1 @@ -MERIS/AATSR Synergy Cloud Screening on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/remote_sensing_aplications/meris_aatsr/Recent content in MERIS/AATSR Synergy Cloud Screening on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +MERIS/AATSR Synergy Cloud Screening on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/remote_sensing_aplications/meris_aatsr/Recent content in MERIS/AATSR Synergy Cloud Screening on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/code/remote_sensing_aplications/randomized_kernels/content/index.html b/code/remote_sensing_aplications/randomized_kernels/content/index.html index 83e596ae..144d45d2 100644 --- a/code/remote_sensing_aplications/randomized_kernels/content/index.html +++ b/code/remote_sensing_aplications/randomized_kernels/content/index.html @@ -2,4 +2,4 @@

Randomized Kernels for Large Scale Earth Observation Applications

Current remote sensing applications for bio-geophysical parameter estimation and image classification handle unprecedented amounts of heterogeneous and complex data. New satellite sensors with higher time, space, and wavelength resolutions present computational challenges. Standard inversion techniques struggle to keep up with these new data sources, and classifying land cover from these new images requires substantial memory and processing power. Statistical learning has aided in developing models that can process large-scale Earth observation data.

Kernel methods are powerful machine learning algorithms widely used in remote sensing and geosciences. However, they are not broadly adopted due to high computational costs when applied to large-scale problems. This paper introduces an efficient kernel method for fast statistical retrieval of atmospheric and biophysical parameters, as well as for image classification. It uses projections on random Fourier features, approximating shift-invariant kernels, enabling large-scale data processing with kernel methods. The approach is computationally efficient, memory-conscious, and parallelizable.

We demonstrate the feasibility of kernel regression and classification on datasets with millions of samples. Examples include atmospheric parameter retrieval from IASI/Metop hyperspectral infrared sounders, large-scale inversion of the PROSAIL model on Sentinel-2 data, and cloud identification over landmarks in time series from MSG/Seviri images.

This webpage includes supplementary material to the paper Randomized Kernels for large scale Earth Observation applications by Pérez-Suay et al. in Remote Sensing of Environment, 2017. We include MATLAB illustrative code and videos showing operational performance of the proposed method in the particular case of cloud identification over landmarks. If you find the paper or provided MATLAB code below interesting and useful, please cite the work.

References

Download

\ No newline at end of file +

Randomized Kernels for Large Scale Earth Observation Applications

Current remote sensing applications for bio-geophysical parameter estimation and image classification handle unprecedented amounts of heterogeneous and complex data. New satellite sensors with higher time, space, and wavelength resolutions present computational challenges. Standard inversion techniques struggle to keep up with these new data sources, and classifying land cover from these new images requires substantial memory and processing power. Statistical learning has aided in developing models that can process large-scale Earth observation data.

Kernel methods are powerful machine learning algorithms widely used in remote sensing and geosciences. However, they are not broadly adopted due to high computational costs when applied to large-scale problems. This paper introduces an efficient kernel method for fast statistical retrieval of atmospheric and biophysical parameters, as well as for image classification. It uses projections on random Fourier features, approximating shift-invariant kernels, enabling large-scale data processing with kernel methods. The approach is computationally efficient, memory-conscious, and parallelizable.

We demonstrate the feasibility of kernel regression and classification on datasets with millions of samples. Examples include atmospheric parameter retrieval from IASI/Metop hyperspectral infrared sounders, large-scale inversion of the PROSAIL model on Sentinel-2 data, and cloud identification over landmarks in time series from MSG/Seviri images.

This webpage includes supplementary material to the paper Randomized Kernels for large scale Earth Observation applications by Pérez-Suay et al. in Remote Sensing of Environment, 2017. We include MATLAB illustrative code and videos showing operational performance of the proposed method in the particular case of cloud identification over landmarks. If you find the paper or provided MATLAB code below interesting and useful, please cite the work.

References

Download

\ No newline at end of file diff --git a/code/remote_sensing_aplications/randomized_kernels/index.html b/code/remote_sensing_aplications/randomized_kernels/index.html index b643f725..67546677 100644 --- a/code/remote_sensing_aplications/randomized_kernels/index.html +++ b/code/remote_sensing_aplications/randomized_kernels/index.html @@ -2,4 +2,4 @@

Randomized Kernels for Large Scale Earth Observation Applications

\ No newline at end of file +

Randomized Kernels for Large Scale Earth Observation Applications

\ No newline at end of file diff --git a/code/remote_sensing_aplications/randomized_kernels/index.xml b/code/remote_sensing_aplications/randomized_kernels/index.xml index a9084175..fd95b850 100644 --- a/code/remote_sensing_aplications/randomized_kernels/index.xml +++ b/code/remote_sensing_aplications/randomized_kernels/index.xml @@ -1 +1 @@ -Randomized Kernels for Large Scale Earth Observation Applications on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/code/remote_sensing_aplications/randomized_kernels/Recent content in Randomized Kernels for Large Scale Earth Observation Applications on ISP - Image and Signal Processing groupHugoen-usCloud detection over landmarks from MSG SEVIRI imageshttps://ipl-uv.github.io/code/remote_sensing_aplications/randomized_kernels/videos/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/code/remote_sensing_aplications/randomized_kernels/videos/Randomized Kernels for Large Scale Earth Observation Applicationshttps://ipl-uv.github.io/code/remote_sensing_aplications/randomized_kernels/content/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/code/remote_sensing_aplications/randomized_kernels/content/ \ No newline at end of file +Randomized Kernels for Large Scale Earth Observation Applications on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/code/remote_sensing_aplications/randomized_kernels/Recent content in Randomized Kernels for Large Scale Earth Observation Applications on ISP - Image and Signal Processing groupHugoen-usCloud detection over landmarks from MSG SEVIRI imageshttps://isp.uv.es/github/code/remote_sensing_aplications/randomized_kernels/videos/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/code/remote_sensing_aplications/randomized_kernels/videos/Randomized Kernels for Large Scale Earth Observation Applicationshttps://isp.uv.es/github/code/remote_sensing_aplications/randomized_kernels/content/Mon, 01 Jan 0001 00:00:00 
+0000https://isp.uv.es/github/code/remote_sensing_aplications/randomized_kernels/content/ \ No newline at end of file diff --git a/code/remote_sensing_aplications/randomized_kernels/videos/index.html b/code/remote_sensing_aplications/randomized_kernels/videos/index.html index 0825acd8..407dc6d4 100644 --- a/code/remote_sensing_aplications/randomized_kernels/videos/index.html +++ b/code/remote_sensing_aplications/randomized_kernels/videos/index.html @@ -2,7 +2,7 @@

Cloud detection over landmarks from MSG SEVIRI images

We cast the problem of cloud identification over landmarks on Meteosat Second Generation (MSG) SEVIRI data. This satellite mission constitutes a fundamental tool for weather forecasting, providing images of the full Earth disc every 15 minutes. Matching the landmarks accurately is of paramount importance in image navigation and registration (INR) models and geometric quality assessment (GQA) in the Level 1 instrument processing chain. Cloud contamination detection over landmarks is an essential step in the MSG processing chain, as undetected clouds are one of the most significant sources of error in landmark matching.

The landmark matching application requires only a binary detection. Furthermore, the cloud detection has to be carried out in real-time to be included in the landmark matching MSG processing chain, which implies efficient and robust detection schemes. Therefore, the proposed classification scheme is designed evaluating the complexity, the scalability, and parallelization of computations.

The dataset provided by EUMETSAT contains MSG/SEVIRI Level 1.5 acquisitions for 200 landmarks of variable size for a whole year, which are mainly located over the coastline, islands, or inland waters. A full Earth disk image is acquired every 15 minutes, which produces 96 images per day and results in 35040 images (or chips) per landmark in 2010. Additionally, Level 2 cloud products were provided for each landmark observation so the Level 2 cloud mask is used as the best available `ground truth’ to validate the results. Summarizing, in this problem, we have to deal with near 7 million MSG/SEVIRI multispectral images acquired during 2010.

The RKS method was used for cloud identification over landmarks. This was done in the context of a project titled Study on pattern recognition based cloud detection over landmarks in collaboration with EUMETSAT European Organisation for the Exploitation of Meteorological Satellites, 01/15 - 11/15. Below we show a set of illustrative videos where the best classifier results are show for particular landmarks. We show the L2 cloud mask and our classifier predictions, along with the cumulative accuracy per acquisition.

We published learning material on geoscience and remote sensing data processing in IEEE GRSS training material and many more online available!

\ No newline at end of file diff --git a/news/2016/maria-piles-fellowship/index.html b/news/2016/maria-piles-fellowship/index.html index db413aaa..3d9568d9 100644 --- a/news/2016/maria-piles-fellowship/index.html +++ b/news/2016/maria-piles-fellowship/index.html @@ -2,4 +2,4 @@

Dr. Maria Piles joins the ISP group through a prestigious “Ramón y Cajal” fellowship next January 2017. The ISP consolidates the research line of retrieval of soil moisture and vegetation biogeophysical parameters from space observations (microwave radiometers, radars and hyperspectral imagers). Welcome Maria!

\ No newline at end of file +

Dr. Maria Piles joins the ISP group through a prestigious “Ramón y Cajal” fellowship next January 2017. The ISP consolidates the research line of retrieval of soil moisture and vegetation biogeophysical parameters from space observations (microwave radiometers, radars and hyperspectral imagers). Welcome Maria!

\ No newline at end of file diff --git a/news/2016/sedal-grant/index.html b/news/2016/sedal-grant/index.html index 6bdb4646..8947c120 100644 --- a/news/2016/sedal-grant/index.html +++ b/news/2016/sedal-grant/index.html @@ -2,6 +2,6 @@

News


0001

\ No newline at end of file +

News


0001

\ No newline at end of file diff --git a/news/2016/sedal-grant/index.xml b/news/2016/sedal-grant/index.xml index f151c567..dfaa6809 100644 --- a/news/2016/sedal-grant/index.xml +++ b/news/2016/sedal-grant/index.xml @@ -1,3 +1,3 @@ -SEDAL Grant on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/news/2016/sedal-grant/Recent content in SEDAL Grant on ISP - Image and Signal Processing groupHugoen-usMon, 01 Jan 0001 00:00:00 +0000<link>https://ipl-uv.github.io/news/2016/sedal-grant/main_objetives/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://ipl-uv.github.io/news/2016/sedal-grant/main_objetives/</guid><description>Objectives Improve prediction models by adaptation to Earth Observation data characteristics. We will rely on the framework of kernel learning, which has emerged as the most appropriate framework for remote sensing data analysis in the last decade. The new retrieval models will be adapted to the particular signal characteristics, such as unevenly sampled time series and missing data, non-Gaussianity, presence of heteroscedastic and non-stationary processes, and non-i.i.d. (spatial and temporal) relations.</description></item><item><title/><link>https://ipl-uv.github.io/news/2016/sedal-grant/motivation/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://ipl-uv.github.io/news/2016/sedal-grant/motivation/</guid><description>Motivation Despite the many successful results and developments, there are still strong limitations for the general adoption of machine learning algorithms for predicting and understanding EO data. Machine learning and signal processing have advanced enormously in the last decade (both at theoretical and applied levels) but have not moved forward the field of EO data analysis to its full potential. 
-The current statistical treatment of biophysical parameters is strongly limited by the quantity and quality of EO data, as well as by the abuse of standard off-the-shelf methods, which, in general, are not well-adapted to the particular characteristics of EO data.</description></item><item><title/><link>https://ipl-uv.github.io/news/2016/sedal-grant/project_structure/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://ipl-uv.github.io/news/2016/sedal-grant/project_structure/</guid><description>Methodology Activities are organized in three major tasks: two theoretical tasks guided by an application-oriented one dealing with relevant EO problems. -Workpackage 1: Improving Statistical Regression Models We will develop new kernel regression models to cope with the shortcomings identified before, namely: improve model’s accuracy by encoding prior knowledge, quantify the uncertainty of the estimations, attain self-explanatory models, and alleviate the computational cost. We will develop ways to encode prior knowledge about the problem by design of kernels and neural structures able to:</description></item><item><title/><link>https://ipl-uv.github.io/news/2016/sedal-grant/proporsal/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://ipl-uv.github.io/news/2016/sedal-grant/proporsal/</guid><description>SEDAL Project Proposals B1 Proposal B2 Proposal Interview Slides Reporting Continuous Reporting: 01/09/2015 - 28/02/2017 Mid-term Report: 01/09/2015 - 28/02/2018 Outreach Presentations Advanced Applications in AI (AAA) Algorithms and Analysis (AAA) Applied Analytics for Agriculture (AAA) Atmospheric and Aerial Analysis (AAA) The SEDAL project is an interdisciplinary effort to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. 
The project focuses on improving prediction models, discovering knowledge and causal relations in EO data, and contributing to various remote sensing applications.</description></item><item><title/><link>https://ipl-uv.github.io/news/2016/sedal-grant/sedal/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://ipl-uv.github.io/news/2016/sedal-grant/sedal/</guid><description>SEDAL: Statistical Learning for Earth Observation Data Analysis SEDAL is a research project funded by the European Research Council (ERC) Consolidator Grant 2015-2020, and directed by Prof. Gustau Camps-Valls at the Universitat de València, Spain. SEDAL is an interdisciplinary project that aims to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. In the last decade, learning models have helped to monitor land, oceans, and atmosphere through the analysis and estimation of climate and biophysical parameters.</description></item></channel></rss> \ No newline at end of file +<?xml version="1.0" encoding="utf-8" standalone="yes"?><rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom"><channel><title>SEDAL Grant on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/news/2016/sedal-grant/Recent content in SEDAL Grant on ISP - Image and Signal Processing groupHugoen-usMon, 01 Jan 0001 00:00:00 +0000<link>https://isp.uv.es/github/news/2016/sedal-grant/main_objetives/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://isp.uv.es/github/news/2016/sedal-grant/main_objetives/</guid><description>Objectives Improve prediction models by adaptation to Earth Observation data characteristics. We will rely on the framework of kernel learning, which has emerged as the most appropriate framework for remote sensing data analysis in the last decade. 
The new retrieval models will be adapted to the particular signal characteristics, such as unevenly sampled time series and missing data, non-Gaussianity, presence of heteroscedastic and non-stationary processes, and non-i.i.d. (spatial and temporal) relations.</description></item><item><title/><link>https://isp.uv.es/github/news/2016/sedal-grant/motivation/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://isp.uv.es/github/news/2016/sedal-grant/motivation/</guid><description>Motivation Despite the many successful results and developments, there are still strong limitations for the general adoption of machine learning algorithms for predicting and understanding EO data. Machine learning and signal processing have advanced enormously in the last decade (both at theoretical and applied levels) but have not moved forward the field of EO data analysis to its full potential. +The current statistical treatment of biophysical parameters is strongly limited by the quantity and quality of EO data, as well as by the abuse of standard off-the-shelf methods, which, in general, are not well-adapted to the particular characteristics of EO data.</description></item><item><title/><link>https://isp.uv.es/github/news/2016/sedal-grant/project_structure/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://isp.uv.es/github/news/2016/sedal-grant/project_structure/</guid><description>Methodology Activities are organized in three major tasks: two theoretical tasks guided by an application-oriented one dealing with relevant EO problems. +Workpackage 1: Improving Statistical Regression Models We will develop new kernel regression models to cope with the shortcomings identified before, namely: improve model’s accuracy by encoding prior knowledge, quantify the uncertainty of the estimations, attain self-explanatory models, and alleviate the computational cost. 
We will develop ways to encode prior knowledge about the problem by design of kernels and neural structures able to:</description></item><item><title/><link>https://isp.uv.es/github/news/2016/sedal-grant/proporsal/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://isp.uv.es/github/news/2016/sedal-grant/proporsal/</guid><description>SEDAL Project Proposals B1 Proposal B2 Proposal Interview Slides Reporting Continuous Reporting: 01/09/2015 - 28/02/2017 Mid-term Report: 01/09/2015 - 28/02/2018 Outreach Presentations Advanced Applications in AI (AAA) Algorithms and Analysis (AAA) Applied Analytics for Agriculture (AAA) Atmospheric and Aerial Analysis (AAA) The SEDAL project is an interdisciplinary effort to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. The project focuses on improving prediction models, discovering knowledge and causal relations in EO data, and contributing to various remote sensing applications.</description></item><item><title/><link>https://isp.uv.es/github/news/2016/sedal-grant/sedal/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://isp.uv.es/github/news/2016/sedal-grant/sedal/</guid><description>SEDAL: Statistical Learning for Earth Observation Data Analysis SEDAL is a research project funded by the European Research Council (ERC) Consolidator Grant 2015-2020, and directed by Prof. Gustau Camps-Valls at the Universitat de València, Spain. SEDAL is an interdisciplinary project that aims to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. 
In the last decade, learning models have helped to monitor land, oceans, and atmosphere through the analysis and estimation of climate and biophysical parameters.</description></item></channel></rss> \ No newline at end of file diff --git a/news/2016/sedal-grant/main_objetives/index.html b/news/2016/sedal-grant/main_objetives/index.html index 7fb8b62a..572a007e 100644 --- a/news/2016/sedal-grant/main_objetives/index.html +++ b/news/2016/sedal-grant/main_objetives/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item 
px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu 
.dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><h1 id=objectives>Objectives</h1><p><strong>Improve prediction models by adaptation to Earth Observation data characteristics.</strong> We will rely on the framework of kernel learning, which has emerged as the most appropriate framework for remote sensing data analysis in the last decade. The new retrieval models will be adapted to the particular signal characteristics, such as unevenly sampled time series and missing data, non-Gaussianity, presence of heteroscedastic and non-stationary processes, and non-i.i.d. (spatial and temporal) relations. 
Models based on kernels and GPs will allow us to advance in uncertainty quantification using predictive variances under biophysical constraints. Advances in sparse, reduced-rank and divide-and-conquer schemes will address the computational cost problem. The proposed kernel framework aims to improve results in terms of accuracy, reduced uncertainty, consistency of the estimations and computational efficiency.</p><p><strong>Discover knowledge and causal relations in Earth observation data.</strong> We will investigate graphical causal models and regression-based causal schemes applied to large heterogeneous EO data streams. This will require improved measures of (conditional) independence, designing experiments in controlled situations and using high-quality data. Learning the hierarchy of the relations between variables and related covariates, as well as their causal relations, may in turn allow the discovery of hidden essential variables, drivers and confounders. Moving from correlation to dependence and then to causation concepts is fundamental to advance the field of Earth Observation and the science of climate change.</p><h1 id=research>Research</h1><p>SEDAL aims at contributing novel machine learning algorithms along these lines:</p><ul><li>Advanced remote sensing data and EO time series processing and statistical characterization</li><li>Advanced regression methods, involving kernel methods, Gaussian processes, random forests, and deep nets</li><li>Efficient large-scale model implementations</li><li>Uncertainty quantification and propagation</li><li>Physically-based models, emulation of RTMs, and design of physically-meaningful priors in machine learning regression</li><li>Knowledge discovery and structure learning from empirical EO data</li><li>(Conditional) Dependence estimation of EO variables and observations</li><li>Graphical models, structure learning, Bayesian networks and causal inference from empirical EO data</li></ul><p>The target EO applications 
are:</p><ul><li>Improved retrieval (regression) algorithms at local, regional, and global planetary scales</li><li>Structure inference and relevance determination of essential climate variables and observations</li><li>Climate change detection, anomalies, extremes, and causal inference attribution</li></ul></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a 
class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><h1 id=objectives>Objectives</h1><p><strong>Improve prediction models by adaptation to Earth Observation data characteristics.</strong> We will rely on the framework of kernel learning, which has emerged as the most appropriate framework for remote sensing data analysis in the last decade. The new retrieval models will be adapted to the particular signal characteristics, such as unevenly sampled time series and missing data, non-Gaussianity, presence of heteroscedastic and non-stationary processes, and non-i.i.d. (spatial and temporal) relations. Models based on kernels and GPs will allow us to advance in uncertainty quantification using predictive variances under biophysical constraints. Advances in sparse, reduced-rank and divide-and-conquer schemes will address the computational cost problem. The proposed kernel framework aims to improve results in terms of accuracy, reduced uncertainty, consistency of the estimations and computational efficiency.</p><p><strong>Discover knowledge and causal relations in Earth observation data.</strong> We will investigate graphical causal models and regression-based causal schemes applied to large heterogeneous EO data streams. 
This will require improved measures of (conditional) independence, designing experiments in controlled situations and using high-quality data. Learning the hierarchy of the relations between variables and related covariates, as well as their causal relations, may in turn allow the discovery of hidden essential variables, drivers and confounders. Moving from correlation to dependence and then to causation concepts is fundamental to advance the field of Earth Observation and the science of climate change.</p><h1 id=research>Research</h1><p>SEDAL aims at contributing novel machine learning algorithms along these lines:</p><ul><li>Advanced remote sensing data and EO time series processing and statistical characterization</li><li>Advanced regression methods, involving kernel methods, Gaussian processes, random forests, and deep nets</li><li>Efficient large-scale model implementations</li><li>Uncertainty quantification and propagation</li><li>Physically-based models, emulation of RTMs, and design of physically-meaningful priors in machine learning regression</li><li>Knowledge discovery and structure learning from empirical EO data</li><li>(Conditional) Dependence estimation of EO variables and observations</li><li>Graphical models, structure learning, Bayesian networks and causal inference from empirical EO data</li></ul><p>The target EO applications are:</p><ul><li>Improved retrieval (regression) algorithms at local, regional, and global planetary scales</li><li>Structure inference and relevance determination of essential climate variables and observations</li><li>Climate change detection, anomalies, extremes, and causal inference attribution</li></ul></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript 
src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2016/sedal-grant/motivation/index.html b/news/2016/sedal-grant/motivation/index.html index d781c4af..44646ecf 100644 --- a/news/2016/sedal-grant/motivation/index.html +++ b/news/2016/sedal-grant/motivation/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 
nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em 
solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><h1 id=motivation>Motivation</h1><p>Despite the many successful results and developments, there are still strong limitations for the general adoption of machine learning algorithms for predicting and understanding EO data. Machine learning and signal processing have advanced enormously in the last decade (both at theoretical and applied levels) but have not moved forward the field of EO data analysis to its full potential.</p><p>The current statistical treatment of biophysical parameters is strongly limited by the quantity and quality of EO data, as well as by the abuse of standard off-the-shelf methods, which, in general, are not well-adapted to the particular characteristics of EO data. 
Specifically, current regression models used for EO applications are still deficient because they rely on limited amounts of meteorological and remote sensing data, do not observe the particular data characteristics, and often make strong assumptions of linearity, homoscedasticity, or Gaussianity. These limitations translate into certain risks of overfitting and unreasonably large uncertainties for the predictions, suggesting a lack of explanatory variables and deficiencies in model specification. Graphical models have been seldom used in EO data analysis. The few works restrict to local studies, use limited amounts of data and explanatory variables, consider remote sensing input features only, apply standard structure learning algorithms driven by univariate (often unconditioned) dependence estimates, and do not extract causal relations or identify new drivers in the problem.</p><p>We advocate that machine learning algorithms for EO applications need to be guided both by data and by prior physical knowledge. This combination is the way to restrict the family of possible solutions and thus obtain nonparametric flexible models that respect the physical rules governing the Earth climate system. We are equally concerned about the ‘black-box’ criticism of statistical learning algorithms, for which we aim to design self-explanatory models and take a leap towards the relevant concept of causal inference from empirical EO data.</p><h1 id=related-projects>Related Projects</h1><br><h2 id=cloud-detection-in-the-cloud>Cloud Detection in the Cloud</h2><ul><li><strong>Google Earth Engine Research Award, L. Gomez-Chova</strong></li><li>01/16 - 12/17</li><li><a href=/old_pages/other/cloud_detection.html>Cloud detection in the cloud</a></li></ul><h2 id=life-vision-learning-image-features-to-encode-visual-information>LIFE-VISION: Learning Image Features to Encode Visual Information</h2><ul><li><strong>Spanish Ministry of Economy and Competitiveness, 2012. 
TIN2012-38102-C03-01</strong></li><li>01/13 - 12/15</li><li><a href=http://lifevisionproject.wordpress.com/>LIFE-VISION</a></li></ul><h2 id=geolearn>GEOLEARN</h2><ul><li><strong>Spanish Ministry of Economy and Competitiveness</strong></li><li>2016</li><li><a href=/old_pages/other/motivation_sd.html>GEOLEARN</a></li></ul><h2 id=esa-cci-soil-moisture>ESA CCI Soil Moisture</h2><ul><li><strong>European Space Agency</strong></li><li><a href=http://esa-soilmoisture-cci.org/>ESA CCI Soil Moisture</a></li></ul></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social 
science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><h1 id=motivation>Motivation</h1><p>Despite the many successful results and developments, there are still strong limitations for the general adoption of machine learning algorithms for predicting and understanding EO data. Machine learning and signal processing have advanced enormously in the last decade (both at theoretical and applied levels) but have not moved forward the field of EO data analysis to its full potential.</p><p>The current statistical treatment of biophysical parameters is strongly limited by the quantity and quality of EO data, as well as by the abuse of standard off-the-shelf methods, which, in general, are not well-adapted to the particular characteristics of EO data. Specifically, current regression models used for EO applications are still deficient because they rely on limited amounts of meteorological and remote sensing data, do not observe the particular data characteristics, and often make strong assumptions of linearity, homoscedasticity, or Gaussianity. These limitations translate into certain risks of overfitting and unreasonably large uncertainties for the predictions, suggesting a lack of explanatory variables and deficiencies in model specification. Graphical models have been seldom used in EO data analysis. 
The few works restrict to local studies, use limited amounts of data and explanatory variables, consider remote sensing input features only, apply standard structure learning algorithms driven by univariate (often unconditioned) dependence estimates, and do not extract causal relations or identify new drivers in the problem.</p><p>We advocate that machine learning algorithms for EO applications need to be guided both by data and by prior physical knowledge. This combination is the way to restrict the family of possible solutions and thus obtain nonparametric flexible models that respect the physical rules governing the Earth climate system. We are equally concerned about the ‘black-box’ criticism of statistical learning algorithms, for which we aim to design self-explanatory models and take a leap towards the relevant concept of causal inference from empirical EO data.</p><h1 id=related-projects>Related Projects</h1><br><h2 id=cloud-detection-in-the-cloud>Cloud Detection in the Cloud</h2><ul><li><strong>Google Earth Engine Research Award, L. Gomez-Chova</strong></li><li>01/16 - 12/17</li><li><a href=/old_pages/other/cloud_detection.html>Cloud detection in the cloud</a></li></ul><h2 id=life-vision-learning-image-features-to-encode-visual-information>LIFE-VISION: Learning Image Features to Encode Visual Information</h2><ul><li><strong>Spanish Ministry of Economy and Competitiveness, 2012. 
TIN2012-38102-C03-01</strong></li><li>01/13 - 12/15</li><li><a href=http://lifevisionproject.wordpress.com/>LIFE-VISION</a></li></ul><h2 id=geolearn>GEOLEARN</h2><ul><li><strong>Spanish Ministry of Economy and Competitiveness</strong></li><li>2016</li><li><a href=/old_pages/other/motivation_sd.html>GEOLEARN</a></li></ul><h2 id=esa-cci-soil-moisture>ESA CCI Soil Moisture</h2><ul><li><strong>European Space Agency</strong></li><li><a href=http://esa-soilmoisture-cci.org/>ESA CCI Soil Moisture</a></li></ul></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2016/sedal-grant/project_structure/index.html b/news/2016/sedal-grant/project_structure/index.html index fbcbfbfa..1cc1eaea 100644 --- a/news/2016/sedal-grant/project_structure/index.html +++ b/news/2016/sedal-grant/project_structure/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex 
align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" 
href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 
30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><h1 
id=methodology>Methodology</h1><p>Activities are organized in three major tasks: two theoretical tasks guided by an application-oriented one dealing with relevant EO problems.</p><h2 id=workpackage-1-improving-statistical-regression-models>Workpackage 1: Improving Statistical Regression Models</h2><p>We will develop new kernel regression models to cope with the shortcomings identified before, namely: improve model’s accuracy by encoding prior knowledge, quantify the uncertainty of the estimations, attain self-explanatory models, and alleviate the computational cost. We will develop ways to encode prior knowledge about the problem by design of kernels and neural structures able to:</p><ol><li>Incorporate explicit physical restrictions based on warping functions.</li><li>Combine heterogeneous information for spatial-spectral, multi-temporal, multi-angular, and multi-sensor data processing.</li><li>Include the information of unlabeled samples via semisupervised covariances.</li><li>Predict multiple variables simultaneously in order to constrain predictions to sensible levels.</li><li>Account for signal and noise characteristics.</li><li>Deploy efficient (sparse and divide-and-conquer) kernel regression models.</li><li>Discover knowledge in kernel models.</li></ol><h2 id=workpackage-2-learning-graphical-models-and-causal-inference>Workpackage 2: Learning Graphical Models and Causal Inference</h2><p>We will exploit results and algorithms of the previous task in order to develop methods that can learn nonlinear data dependencies and possibly infer causal relations. 
We will propose:</p><ol><li>New conditional independence estimates.</li><li>Constrained-based (physically-based) structure learning.</li><li>Dynamic graphical models.</li><li>Causal inference models, mostly based on detection of PDF asymmetries and regression-based methods.</li></ol><p>Models and inferred structures will be tested through pure non-interventional settings, as well as intervention analyses in controlled situations, that might reveal the presence of hidden causal variables and relationships, and by quantifying the impact of prior (physical) knowledge.</p><h2 id=workpackage-3-case-study---from-local-to-global-scales-in-eo-variable-learning>Workpackage 3: Case Study - From Local to Global Scales in EO Variable Learning</h2><p>We will focus on the relevant applications of:</p><ol><li>Learning statistical predictive models for key biophysical variables.</li><li>Extracting knowledge from the models and the nonlinear hierarchical data representations.</li><li>Inferring causal variable relations from empirical data, both at local and global scales.</li></ol><h3 id=local-scale>Local Scale</h3><ul><li>Modeling biophysical parameters at local scale, primarily focusing on chlorophyll content, fluorescence, biomass, LAI, and fAPAR.</li><li>The study and quantification of uncertainty, inclusion of prior physical knowledge to constrain model’s flexibility, and the analysis of dependence/causal relations between variables will be the main scientific questions to be addressed.</li></ul><h3 id=global-scale>Global Scale</h3><ul><li>Generate global flux products derived from upscaling FLUXNET eddy covariance observations using an array of remote sensing data.</li><li>We will evaluate the developed regression algorithms, the relative relevance of explanatory variables, and will learn graph dependencies between remote sensing variables and carbon (e.g., total ecosystem respiration, net ecosystem exchange), energy (e.g., latent heat and heat radiation), and water (e.g., 
evapotranspiration) fluxes.</li><li>We will also study statistical relations between global products for both essential climate variables over land (biomass, LAI, and the fraction of absorbed photosynthetically active radiation, fAPAR).</li></ul><p>Both case studies will involve important efforts in open data harmonization (formats, centralized database server, access/sharing protocols, documentation, etc.) and open code generation (toolbox releases, products, models, etc.).</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li 
class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><h1 id=methodology>Methodology</h1><p>Activities are organized in three major tasks: two theoretical tasks guided by an application-oriented one dealing with relevant EO problems.</p><h2 id=workpackage-1-improving-statistical-regression-models>Workpackage 1: Improving Statistical Regression Models</h2><p>We will develop new kernel regression models to cope with the shortcomings identified before, namely: improve model’s accuracy by encoding prior knowledge, quantify the uncertainty of the estimations, attain self-explanatory models, and alleviate the computational cost. 
We will develop ways to encode prior knowledge about the problem by design of kernels and neural structures able to:</p><ol><li>Incorporate explicit physical restrictions based on warping functions.</li><li>Combine heterogeneous information for spatial-spectral, multi-temporal, multi-angular, and multi-sensor data processing.</li><li>Include the information of unlabeled samples via semisupervised covariances.</li><li>Predict multiple variables simultaneously in order to constrain predictions to sensible levels.</li><li>Account for signal and noise characteristics.</li><li>Deploy efficient (sparse and divide-and-conquer) kernel regression models.</li><li>Discover knowledge in kernel models.</li></ol><h2 id=workpackage-2-learning-graphical-models-and-causal-inference>Workpackage 2: Learning Graphical Models and Causal Inference</h2><p>We will exploit results and algorithms of the previous task in order to develop methods that can learn nonlinear data dependencies and possibly infer causal relations. 
We will propose:</p><ol><li>New conditional independence estimates.</li><li>Constrained-based (physically-based) structure learning.</li><li>Dynamic graphical models.</li><li>Causal inference models, mostly based on detection of PDF asymmetries and regression-based methods.</li></ol><p>Models and inferred structures will be tested through pure non-interventional settings, as well as intervention analyses in controlled situations, that might reveal the presence of hidden causal variables and relationships, and by quantifying the impact of prior (physical) knowledge.</p><h2 id=workpackage-3-case-study---from-local-to-global-scales-in-eo-variable-learning>Workpackage 3: Case Study - From Local to Global Scales in EO Variable Learning</h2><p>We will focus on the relevant applications of:</p><ol><li>Learning statistical predictive models for key biophysical variables.</li><li>Extracting knowledge from the models and the nonlinear hierarchical data representations.</li><li>Inferring causal variable relations from empirical data, both at local and global scales.</li></ol><h3 id=local-scale>Local Scale</h3><ul><li>Modeling biophysical parameters at local scale, primarily focusing on chlorophyll content, fluorescence, biomass, LAI, and fAPAR.</li><li>The study and quantification of uncertainty, inclusion of prior physical knowledge to constrain model’s flexibility, and the analysis of dependence/causal relations between variables will be the main scientific questions to be addressed.</li></ul><h3 id=global-scale>Global Scale</h3><ul><li>Generate global flux products derived from upscaling FLUXNET eddy covariance observations using an array of remote sensing data.</li><li>We will evaluate the developed regression algorithms, the relative relevance of explanatory variables, and will learn graph dependencies between remote sensing variables and carbon (e.g., total ecosystem respiration, net ecosystem exchange), energy (e.g., latent heat and heat radiation), and water (e.g., 
evapotranspiration) fluxes.</li><li>We will also study statistical relations between global products for both essential climate variables over land (biomass, LAI, and the fraction of absorbed photosynthetically active radiation, fAPAR).</li></ul><p>Both case studies will involve important efforts in open data harmonization (formats, centralized database server, access/sharing protocols, documentation, etc.) and open code generation (toolbox releases, products, models, etc.).</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2016/sedal-grant/proporsal/index.html b/news/2016/sedal-grant/proporsal/index.html index 5c951742..c2fb6377 100644 --- a/news/2016/sedal-grant/proporsal/index.html +++ b/news/2016/sedal-grant/proporsal/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img 
src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a 
class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath 
stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><h1 id=sedal-project>SEDAL Project</h1><br><h2 
id=proposals>Proposals</h2><ul><li><strong>B1 Proposal</strong></li><li><strong>B2 Proposal</strong></li></ul><h2 id=interview-slides>Interview Slides</h2><br><h3 id=reporting>Reporting</h3><ul><li><strong>Continuous Reporting:</strong> 01/09/2015 - 28/02/2017</li><li><strong>Mid-term Report:</strong> 01/09/2015 - 28/02/2018</li></ul><h3 id=outreach-presentations>Outreach Presentations</h3><ul><li><strong>Advanced Applications in AI (AAA)</strong></li><li><strong>Algorithms and Analysis (AAA)</strong></li><li><strong>Applied Analytics for Agriculture (AAA)</strong></li><li><strong>Atmospheric and Aerial Analysis (AAA)</strong></li></ul><p>The SEDAL project is an interdisciplinary effort to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. The project focuses on improving prediction models, discovering knowledge and causal relations in EO data, and contributing to various remote sensing applications.</p><p>Through the development of kernel learning frameworks and graphical models, SEDAL aims to address current limitations in EO data analysis. The project’s methodologies involve enhancing statistical regression models, learning graphical models and causal inference, and conducting case studies from local to global scales.</p><p>SEDAL’s outreach efforts include multiple presentations and reports aimed at disseminating research findings and engaging with the broader scientific community. 
These presentations cover a wide range of applications and advancements in artificial intelligence and EO data analysis.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" 
href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><h1 id=sedal-project>SEDAL Project</h1><br><h2 id=proposals>Proposals</h2><ul><li><strong>B1 Proposal</strong></li><li><strong>B2 Proposal</strong></li></ul><h2 id=interview-slides>Interview Slides</h2><br><h3 id=reporting>Reporting</h3><ul><li><strong>Continuous Reporting:</strong> 01/09/2015 - 28/02/2017</li><li><strong>Mid-term Report:</strong> 01/09/2015 - 28/02/2018</li></ul><h3 id=outreach-presentations>Outreach Presentations</h3><ul><li><strong>Advanced Applications in AI (AAA)</strong></li><li><strong>Algorithms and Analysis (AAA)</strong></li><li><strong>Applied Analytics for Agriculture (AAA)</strong></li><li><strong>Atmospheric and Aerial Analysis (AAA)</strong></li></ul><p>The SEDAL project is an interdisciplinary effort to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. The project focuses on improving prediction models, discovering knowledge and causal relations in EO data, and contributing to various remote sensing applications.</p><p>Through the development of kernel learning frameworks and graphical models, SEDAL aims to address current limitations in EO data analysis. 
The project’s methodologies involve enhancing statistical regression models, learning graphical models and causal inference, and conducting case studies from local to global scales.</p><p>SEDAL’s outreach efforts include multiple presentations and reports aimed at disseminating research findings and engaging with the broader scientific community. These presentations cover a wide range of applications and advancements in artificial intelligence and EO data analysis.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2016/sedal-grant/sedal/index.html b/news/2016/sedal-grant/sedal/index.html index c03c82b6..6c3b104f 100644 --- a/news/2016/sedal-grant/sedal/index.html +++ b/news/2016/sedal-grant/sedal/index.html @@ -2,7 +2,7 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" 
height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" 
href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' 
stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><div class=container><div class=row><div class=col-md-8><h1><strong>SEDAL: Statistical 
Learning for Earth Observation Data Analysis</strong></h1><p align=justify>SEDAL is a research project funded by the European Research Council (ERC) Consolidator Grant 2015-2020, and directed by Prof. Gustau Camps-Valls at the Universitat de València, Spain. +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" 
href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color 
.3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><div class=container><div class=row><div class=col-md-8><h1><strong>SEDAL: Statistical Learning for Earth Observation Data Analysis</strong></h1><p align=justify>SEDAL is a research project funded by the European Research Council (ERC) Consolidator Grant 2015-2020, and directed by Prof. Gustau Camps-Valls at the Universitat de València, Spain. SEDAL is an interdisciplinary project that aims to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. In the last decade, learning models have helped to monitor land, oceans, and atmosphere through the analysis and estimation of climate and biophysical parameters. Current approaches, however, cannot deal efficiently with the particular characteristics of remote sensing data. This problem increases with the operational EU Copernicus Sentinel services, and we face now the urgent need to process and understand huge amounts of complex, heterogeneous, multisource, and structured data to monitor the rapid changes already occurring in our Planet. SEDAL aims to develop the next generation of statistical inference methods for EO data analysis. We develop advanced regression methods to improve efficiency, prediction accuracy and uncertainties, encode physical knowledge about the problem, and attain self-explanatory models learned from empirical data. Even more importantly, we learn graphical causal models to explain the potentially complex interactions between key observed variables, and discover hidden essential drivers and confounding factors. 
This project tackles the fundamental problem of moving from correlation to dependence and then to causation through EO data analysis. The theoretical developments are guided by the challenging problems of estimating biophysical parameters and learning causal relations at both local and global planetary scales. Check out the details in the <a href=/news/2016/sedal-grant/proporsal/index.html>proposal, reporting and outreach section</a>.</p><br><div class="row align-items-end text-center"><div class=col-md-4><div class=image-container><a href=/news/2016/sedal-grant/motivation/index.html><img src=/images/fig1b.webp><br></a><a href=/news/2016/sedal-grant/motivation/index.html><b>Motivation</b></a></div></div><div class=col-md-4><div class=image-container><a href=/news/2016/sedal-grant/main_objetives/index.html><img src=/images/fig3.webp><br></a><a href=/news/2016/sedal-grant/main_objetives/index.html><b>Main Objectives</b></a></div></div><div class=col-md-4><div class=image-container><a href=/news/2016/sedal-grant/project_structure/index.html><img src=/images/fig0.webp><br></a><a href=/news/2016/sedal-grant/project_structure/index.html><b>Project Structure</b></a></div></div></div></div><div class=col-md-4><div class="panel panel-danger"><div class=panel-heading>News</div><div class=list-group><a href=/people class=list-group-item><b>The group is growing quite a lot!</b> New people with lots of ideas, background and expertises. Check their profiles and exciting research here!</a> <a href="https://scholar.google.es/citations?hl=en&user=MsMYu-IAAAAJ" class=list-group-item>We reached an h-index of 62. 
Follow our research here!</a> diff --git a/news/2017/composite-kernels/index.html b/news/2017/composite-kernels/index.html index 92d51e9e..d44e4624 100644 --- a/news/2017/composite-kernels/index.html +++ b/news/2017/composite-kernels/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button 
aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item 
px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>We are a classic! 
“Composite kernels for hyperspectral image classification” published in IEEE Geoscience and Remote Sensing Letters in 2006 is one of the highly-cited papers in its area of research, maintaining its impact long after publication.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li 
class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>We are a classic! “Composite kernels for hyperspectral image classification” published in IEEE Geoscience and Remote Sensing Letters in 2006 is one of the highly-cited papers in its area of research, maintaining its impact long after publication.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2017/dsp-kernel-methods/index.html b/news/2017/dsp-kernel-methods/index.html index 35be53e5..2ffea8d7 100644 --- a/news/2017/dsp-kernel-methods/index.html +++ b/news/2017/dsp-kernel-methods/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous 
referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a 
class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>We are excited about the publication of our new book, entitled “Digital Signal Processing with Kernel Methods”, by Rojo-Álvarez, J.L. and Martínez-Ramón, M. and Muñoz-Marí, J. and Camps-Valls, G., published by Wiley and sons.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" 
href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item 
px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>We are excited about the publication of our new book, entitled “Digital Signal Processing with Kernel Methods”, by Rojo-Álvarez, J.L. and Martínez-Ramón, M. and Muñoz-Marí, J. 
and Camps-Valls, G., published by Wiley and sons.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2017/geoscience-committee/index.html b/news/2017/geoscience-committee/index.html index cfa21a66..e2635707 100644 --- a/news/2017/geoscience-committee/index.html +++ b/news/2017/geoscience-committee/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul 
class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a 
class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 
56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Maria Piles (president) and Gustau Camps-Valls (member) in the Executive Committee of the Spain Geoscience and Remote Sensing Chapter. 
Let’s do things to promote technology and science in remote sensing!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications 
role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item 
.nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Maria Piles (president) and Gustau Camps-Valls (member) in the Executive Committee of the Spain Geoscience and Remote Sensing Chapter. Let’s do things to promote technology and science in remote sensing!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2017/global-carbon-sinks/index.html b/news/2017/global-carbon-sinks/index.html index a4030d26..c87ab097 100644 --- a/news/2017/global-carbon-sinks/index.html +++ b/news/2017/global-carbon-sinks/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ 
class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" 
href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 
30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>An international 
research involving the Universitat de València just provided a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks. The results are published in Nature with Gustau Camps-Valls as co-author.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a 
class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>An international research involving the Universitat de València just provided a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks. The results are published in Nature with Gustau Camps-Valls as co-author.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2017/jesus-malo-editor/index.html b/news/2017/jesus-malo-editor/index.html index e6c9e387..50f6159e 100644 --- a/news/2017/jesus-malo-editor/index.html +++ b/news/2017/jesus-malo-editor/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css 
integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Prof. Jesús Malo has been appointed as Associate Editor of Frontiers in Neuroscience (IF = 3.4), section Perception Science (May 2017).</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" 
href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page 
href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Prof. 
Jesús Malo has been appointed as Associate Editor of Frontiers in Neuroscience (IF = 3.4), section Perception Science (May 2017).</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2017/metode-tv-sedal-2017/index.html b/news/2017/metode-tv-sedal-2017/index.html index ce11c57d..32ead43e 100644 --- a/news/2017/metode-tv-sedal-2017/index.html +++ b/news/2017/metode-tv-sedal-2017/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span 
class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" 
href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 
56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript 
src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" 
href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' 
stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the 
public.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2018/group-growing/index.html b/news/2018/group-growing/index.html index e67453b5..20ceed36 100644 --- a/news/2018/group-growing/index.html +++ b/news/2018/group-growing/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight 
d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 
56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>The group is growing quite a lot! New people with lots of ideas, background and expertises. Welcome Jose Enrique, Jose Juan, Diego, Dan, Emiliano, and Emma! 
Check their profiles and exciting research here!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button 
aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu 
.dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>The group is growing quite a lot! New people with lots of ideas, background and expertises. Welcome Jose Enrique, Jose Juan, Diego, Dan, Emiliano, and Emma! Check their profiles and exciting research here!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2018/h-index-60/index.html b/news/2018/h-index-60/index.html index beaeee84..d47326d7 100644 --- a/news/2018/h-index-60/index.html +++ b/news/2018/h-index-60/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex 
align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" 
href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 
30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>We reached an 
h-index of 60. Follow our research here!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button 
aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu 
.dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>We reached an h-index of 60. Follow our research here!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2018/ieee-fellow/index.html b/news/2018/ieee-fellow/index.html index a0f80a31..6c3a662c 100644 --- a/news/2018/ieee-fellow/index.html +++ b/news/2018/ieee-fellow/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> 
</a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" 
href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 
23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Professor Camps-Valls was elevated to the grade of IEEE Fellow by the Geoscience and Remote Sensing Society, and also recognized by the Signal Processing 
Society.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul 
class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color 
.3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Professor Camps-Valls was elevated to the grade of IEEE Fellow by the Geoscience and Remote Sensing Society, and also recognized by the Signal Processing Society.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2018/jesus-malo-professorship/index.html b/news/2018/jesus-malo-professorship/index.html index 1f23f6f7..d7ec2ed6 100644 --- a/news/2018/jesus-malo-professorship/index.html +++ b/news/2018/jesus-malo-professorship/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none 
d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" 
href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 
30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Jesús Malo got the 
full professorship in Vision Science at the Department of Physics in July 2018.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ 
id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav 
li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Jesús Malo got the full professorship in Vision Science at the Department of Physics in July 2018.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2019/cool-toolboxes/index.html b/news/2019/cool-toolboxes/index.html index b23abe7b..b15109a0 100644 --- a/news/2019/cool-toolboxes/index.html +++ b/news/2019/cool-toolboxes/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 
class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" 
href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' 
stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Cool! 
Impressive progress in our toolboxes - Gaussian processes repository & many more. Give it a try! Feedback welcome.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" 
href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Cool! Impressive progress in our toolboxes - Gaussian processes repository & many more. Give it a try! Feedback welcome.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2020/elise-network-member/index.html b/news/2020/elise-network-member/index.html index 617ac6d5..f0d09c30 100644 --- a/news/2020/elise-network-member/index.html +++ b/news/2020/elise-network-member/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link 
rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 
nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav 
li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>ISP Member of the European Network of Artificial Intelligence Excellence Centres ELISE.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>ISP Member of the European Network of Artificial Intelligence Excellence Centres ELISE.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2020/horizon-2020-imiracli/index.html b/news/2020/horizon-2020-imiracli/index.html index 3fe5d5b4..a1d21b4d 100644 --- a/news/2020/horizon-2020-imiracli/index.html +++ b/news/2020/horizon-2020-imiracli/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet 
href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a 
class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item 
.nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Starting on January 2020, - Horizon-H2020 EU iMIRACLI Bringing together leading climate and machine learning experts across Europe to train a new next generation of climate data scientists.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" 
href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page 
href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Starting on January 2020, - Horizon-H2020 EU iMIRACLI Bringing together leading climate and machine learning experts across Europe to train a new next generation of climate data scientists.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2020/ieee-distinguished-lecturer/index.html b/news/2020/ieee-distinguished-lecturer/index.html index 77b050ee..a3df0d6e 100644 --- a/news/2020/ieee-distinguished-lecturer/index.html +++ b/news/2020/ieee-distinguished-lecturer/index.html @@ -2,4 +2,4 @@ <link 
href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual 
neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Prof. 
Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" 
href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Prof. Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2020/interview-in-nature/index.html b/news/2020/interview-in-nature/index.html index 6ea13156..13a7b177 100644 --- a/news/2020/interview-in-nature/index.html +++ b/news/2020/interview-in-nature/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet 
href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a 
class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item 
.nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Prof. Gustau Camps-Valls was interviewed about the international research published in Nature, which provides a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a 
class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Prof. 
Gustau Camps-Valls was interviewed about the international research published in Nature, which provides a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2020/metode-tv-sedal/index.html b/news/2020/metode-tv-sedal/index.html index ce11c57d..32ead43e 100644 --- a/news/2020/metode-tv-sedal/index.html +++ b/news/2020/metode-tv-sedal/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 
aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a 
class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 
23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.</p></div></main></body><script 
src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu 
aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color 
.3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2021/ai-oceans-sea-level/index.html b/news/2021/ai-oceans-sea-level/index.html index 31f7d7ed..8bc8489c 100644 --- a/news/2021/ai-oceans-sea-level/index.html +++ b/news/2021/ai-oceans-sea-level/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp 
alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" 
href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' 
stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>La inteligencia artificial llega a los océanos para desarrollar predicciones sobre el 
nivel del mar.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul 
class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color 
.3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>La inteligencia artificial llega a los océanos para desarrollar predicciones sobre el nivel del mar.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2021/highly-cited-researcher-2021/index.html b/news/2021/highly-cited-researcher-2021/index.html index 62b428c0..e7542d8b 100644 --- a/news/2021/highly-cited-researcher-2021/index.html +++ b/news/2021/highly-cited-researcher-2021/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img 
src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a 
class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath 
stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Gustau Camps-Valls was named a Highly Cited Researcher 2021 by 
Clarivate for Academia and Government WoS in recognition of his contribution to AI for the geosciences and scientific enthusiasm! We are all happy of being part of this great team at ISP, participating in inspiring research projects and promoting international collaboration networks, Gustau. Congrats!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a 
class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Gustau Camps-Valls was named a Highly Cited Researcher 2021 by Clarivate for Academia and Government WoS in recognition of his contribution to AI for the geosciences and scientific enthusiasm! We are all happy of being part of this great team at ISP, participating in inspiring research projects and promoting international collaboration networks, Gustau. 
Congrats!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2021/horizon-2020-xaida/index.html b/news/2021/horizon-2020-xaida/index.html index 4be45434..bbb45079 100644 --- a/news/2021/horizon-2020-xaida/index.html +++ b/news/2021/horizon-2020-xaida/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 
nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li 
class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 
56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Kick-off September! Horizon 2020 EU XAIDA Extreme Events: Artificial Intelligence for detection and attribution- Climate Change studies. 
Stay tuned!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul 
class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color 
.3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Kick-off September! Horizon 2020 EU XAIDA Extreme Events: Artificial Intelligence for detection and attribution- Climate Change studies. Stay tuned!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2021/isp-ipl-growing-up/index.html b/news/2021/isp-ipl-growing-up/index.html index 91002bc2..8fd05d75 100644 --- a/news/2021/isp-ipl-growing-up/index.html +++ b/news/2021/isp-ipl-growing-up/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img 
src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a 
class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath 
stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>ISP-IPL growing-up New people and projects. Welcome! 
Exciting research and synergies going on! Check out our collaborators and projects sections -ERC USMILE, H2020 DeepCube, H2020 iMIRACLI, Deepcloud, Leaves, Maloc… we explore the domians of land, atmosphere, ocean, computer vision and more!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page 
href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>ISP-IPL growing-up New people and projects. Welcome! Exciting research and synergies going on! Check out our collaborators and projects sections -ERC USMILE, H2020 DeepCube, H2020 iMIRACLI, Deepcloud, Leaves, Maloc… we explore the domians of land, atmosphere, ocean, computer vision and more!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2021/worldfloods-detection/index.html b/news/2021/worldfloods-detection/index.html index 7e7fbf34..8a2ff5bf 100644 --- a/news/2021/worldfloods-detection/index.html +++ b/news/2021/worldfloods-detection/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css 
integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>“Worldfloods” detection model onboard SpaceX’s Falcon rocket launched June 30th! Our script is now flying on a hard-disk at 20,000 km from the Earth! Successfully developed by ISP & Oxford University in partnership with ESA Φ-lab! Who made it possible? Gonzalo Mateo-Garcia, Joshua Veitch-Michaelis, Lewis Smith, Silviu Oprea, Guy Schumann, Yarin Gal, Atılım Güneş Baydin, and Dietmar Backes. 
Congrats!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul 
class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color 
.3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>“Worldfloods” detection model onboard SpaceX’s Falcon rocket launched June 30th! Our script is now flying on a hard-disk at 20,000 km from the Earth! Successfully developed by ISP & Oxford University in partnership with ESA Φ-lab! Who made it possible? Gonzalo Mateo-Garcia, Joshua Veitch-Michaelis, Lewis Smith, Silviu Oprea, Guy Schumann, Yarin Gal, Atılım Güneş Baydin, and Dietmar Backes. Congrats!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2022/ai4science-deepextremes/index.html b/news/2022/ai4science-deepextremes/index.html index 01e64efb..35211ddb 100644 --- a/news/2022/ai4science-deepextremes/index.html +++ b/news/2022/ai4science-deepextremes/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet 
href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a 
class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item 
.nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>ESA 2022-23 AI4Science DeepExtremes Project Kicked-off! Multi-Hazards, Compounds and Cascade events. We will rely on deep learning to deal with spatio-temporal data, techniques from computer vision for forecasting impacts, and the advanced regression methods for associating impacts on biosphere and society. Understanding what the DL models have learned are of importance here: explainable AI techniques and methods from modern Bayesian inference. Follow us on Twitter DeepExtremes LeipzigValenciaJenaHamburg.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine 
learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" 
aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>ESA 2022-23 AI4Science DeepExtremes Project Kicked-off! Multi-Hazards, Compounds and Cascade events. We will rely on deep learning to deal with spatio-temporal data, techniques from computer vision for forecasting impacts, and the advanced regression methods for associating impacts on biosphere and society. Understanding what the DL models have learned are of importance here: explainable AI techniques and methods from modern Bayesian inference. 
Follow us on Twitter DeepExtremes LeipzigValenciaJenaHamburg.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2022/elise-project/index.html b/news/2022/elise-project/index.html index fe84c285..62dec977 100644 --- a/news/2022/elise-project/index.html +++ b/news/2022/elise-project/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto 
mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page 
href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 
56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>The ISP participates in the ELISE project actively, and contributes to the ELISE vision for the next generation of AI for Europe. 
In particular, on ELISE’s Strategic Research Agenda and trends in AI!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications 
role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item 
.nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>The ISP participates in the ELISE project actively, and contributes to the ELISE vision for the next generation of AI for Europe. In particular, on ELISE’s Strategic Research Agenda and trends in AI!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2022/highly-cited-researcher/index.html b/news/2022/highly-cited-researcher/index.html index b635fb5c..7f8f0b6a 100644 --- a/news/2022/highly-cited-researcher/index.html +++ b/news/2022/highly-cited-researcher/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div 
class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu 
aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color 
.3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Gustau Camps-Valls is a Highly Cited Researcher in 2022 again!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a 
class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Gustau Camps-Valls is a Highly Cited Researcher in 2022 again!</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2022/ia-limitations/index.html b/news/2022/ia-limitations/index.html index ef53bd14..1b10a450 100644 --- a/news/2022/ia-limitations/index.html +++ b/news/2022/ia-limitations/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script 
src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ 
id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color 
.3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>El prof. Camps-Valls publica un artículo advirtiendo de las limitaciones de la IA actual, y aboga por incorporar conocimiento del dominio y las leyes de la Física, realizar un mayor esfuerzo en la explicabilidad de los modelos, y en la inferencia causal.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" 
href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a 
class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>El prof. 
Camps-Valls publica un artículo advirtiendo de las limitaciones de la IA actual, y aboga por incorporar conocimiento del dominio y las leyes de la Física, realizar un mayor esfuerzo en la explicabilidad de los modelos, y en la inferencia causal.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2022/nuevo-miembro-academia/index.html b/news/2022/nuevo-miembro-academia/index.html index f426a6b8..c76bda75 100644 --- a/news/2022/nuevo-miembro-academia/index.html +++ b/news/2022/nuevo-miembro-academia/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 
aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a 
class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 
23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>El prof. 
Camps-Valls es un nuevo miembro de la Academia Europea de las Ciencias, y de la Academia Europaea.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ 
id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav 
li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>El prof. Camps-Valls es un nuevo miembro de la Academia Europea de las Ciencias, y de la Academia Europaea.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2023/ai-for-earth/index.html b/news/2023/ai-for-earth/index.html index 9d17aa3e..f2925573 100644 --- a/news/2023/ai-for-earth/index.html +++ b/news/2023/ai-for-earth/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 
class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" 
href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' 
stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Gustau Camps-Valls, Markus Reichstein, Joachim Denzler y Maria Piles coordinan el 
ciclo “AI for Earth and Sustainability Science” dentro de las acciones AI for Good de la ITU.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ 
id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav 
li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Gustau Camps-Valls, Markus Reichstein, Joachim Denzler y Maria Piles coordinan el ciclo “AI for Earth and Sustainability Science” dentro de las acciones AI for Good de la ITU.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2023/ai4cs-prometeo/index.html b/news/2023/ai4cs-prometeo/index.html index 2147d124..7e05de6e 100644 --- a/news/2023/ai4cs-prometeo/index.html +++ b/news/2023/ai4cs-prometeo/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center 
a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" 
href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 
30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>AI4CS - GVA 
PROMETEO 2022-2025 AI for complex systems: Brain, Earth, Climate, Society. We are hosting the first project meeting in person! The aim is to know each other better, find common points of interest and organize activities e.g. workshops, PhD students exchange, research designs, etc. Presentations by Marcelo Bertalmío from CSIC, Luca Martino from URJC, Sancho Salcedo from UAH, Adriano Camps from UPC, Luis Guanter from UPV, and Gustau Camps-Valls, followed by discussions about limitations on the standard model of vision science, imaging and AI, model and feature selection trade-offs, extreme event predictions and diverse AI-relevant Remote Sensing case studies. Interesting problems sparking new collaborations.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" 
href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page 
href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>AI4CS - GVA PROMETEO 2022-2025 AI for complex systems: Brain, Earth, Climate, Society. We are hosting the first project meeting in person! The aim is to know each other better, find common points of interest and organize activities e.g. workshops, PhD students exchange, research designs, etc. Presentations by Marcelo Bertalmío from CSIC, Luca Martino from URJC, Sancho Salcedo from UAH, Adriano Camps from UPC, Luis Guanter from UPV, and Gustau Camps-Valls, followed by discussions about limitations on the standard model of vision science, imaging and AI, model and feature selection trade-offs, extreme event predictions and diverse AI-relevant Remote Sensing case studies. 
Interesting problems sparking new collaborations.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2023/ellis-meeting/index.html b/news/2023/ellis-meeting/index.html index 373b92d0..82b21f45 100644 --- a/news/2023/ellis-meeting/index.html +++ b/news/2023/ellis-meeting/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li 
class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page 
href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 
56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>The UV and the ISP gathers in València (Spain) world-renowned experts in two interconnected fields: climate change and artificial intelligence. The meeting is organized by the ELLIS Program ‘Machine Learning for Earth and Climate Sciences’ and the ELISE project, which connect outstanding researchers in these fields across Europe. 
The discussions will focus on detecting, analyzing, and understanding extreme weather events, such as droughts, heat waves, floods, fires, and hurricanes, and applying artificial intelligence in this context.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item 
dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>The UV and the ISP gathers in València (Spain) world-renowned experts in two interconnected fields: climate change and artificial intelligence. The meeting is organized by the ELLIS Program ‘Machine Learning for Earth and Climate Sciences’ and the ELISE project, which connect outstanding researchers in these fields across Europe. 
The discussions will focus on detecting, analyzing, and understanding extreme weather events, such as droughts, heat waves, floods, fires, and hurricanes, and applying artificial intelligence in this context.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2023/esa-egu-2023/index.html b/news/2023/esa-egu-2023/index.html index ece5d866..7486ab38 100644 --- a/news/2023/esa-egu-2023/index.html +++ b/news/2023/esa-egu-2023/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" 
style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a 
class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover 
.dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>The group led by Gustau Camps-Valls from the University of Valencia in Spain is finalist for the ESA-EGU 2023 team award, for their work on the development of novel Artificial Intelligence methods to analyse Earth observation data, with the goal of modelling and understanding the complex interactions between the various 
components of the Earth system.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button 
aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu 
.dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>The group led by Gustau Camps-Valls from the University of Valencia in Spain is finalist for the ESA-EGU 2023 team award, for their work on the development of novel Artificial Intelligence methods to analyse Earth observation data, with the goal of modelling and understanding the complex interactions between the various components of the Earth system.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2023/fundacion-canada-blanch/index.html b/news/2023/fundacion-canada-blanch/index.html index fb6bd3fa..fc763d7d 100644 --- a/news/2023/fundacion-canada-blanch/index.html +++ b/news/2023/fundacion-canada-blanch/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script 
src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ 
id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color 
.3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let 
n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Gustau Camps-Valls, reconocido internacionalmente por sus contribuciones en Inteligencia Artificial, aprendizaje automático y su aplicación en las ciencias de la tierra y el clima, inicia un ciclo de conferencias sobre IA en la Fundación Cañada Blanch.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" 
href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a 
class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>Gustau Camps-Valls, reconocido internacionalmente por sus contribuciones en Inteligencia Artificial, aprendizaje automático y su aplicación en las ciencias de la tierra y el clima, inicia un ciclo de conferencias sobre IA en la Fundación Cañada Blanch.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2023/isp-reuniones/index.html b/news/2023/isp-reuniones/index.html index 43d872bf..a45a5a19 100644 --- a/news/2023/isp-reuniones/index.html +++ b/news/2023/isp-reuniones/index.html @@ -2,4 +2,4 @@ <link 
href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual 
neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>El ISP organiza dos reuniones que reúnen expertos en fenómenos meteorológicos extremos y en la aplicación de algoritmos en València. 
Las reuniones estudian estrategias para la detección temprana, dentro de acciones en los proyectos europeos H2020 XAIDA y del programa de investigación en ELLIS “Machine Learning for Earth and Climate Science”, coordinados por Gustau Camps-Valls.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page 
href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>El ISP organiza dos reuniones que reúnen expertos en fenómenos meteorológicos extremos y en la aplicación de algoritmos en València. Las reuniones estudian estrategias para la detección temprana, dentro de acciones en los proyectos europeos H2020 XAIDA y del programa de investigación en ELLIS “Machine Learning for Earth and Climate Science”, coordinados por Gustau Camps-Valls.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/2023/nature-communications-ia/index.html b/news/2023/nature-communications-ia/index.html index 106fea81..2966b20c 100644 --- a/news/2023/nature-communications-ia/index.html +++ b/news/2023/nature-communications-ia/index.html @@ -2,4 +2,4 @@ <link href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet 
href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" 
href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem 
!important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 
56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>En un artículo en Nature Communications demostramos que la IA descubre que el nivel socioeconómico explica los movimientos poblacionales que se dan tras catástrofes generadas por fenómenos naturales extremos como inundaciones, vendavales y deslizamientos de tierra.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a 
class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page 
href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid 
transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><p>En un artículo en Nature Communications demostramos que la IA descubre que el nivel socioeconómico explica los movimientos poblacionales que se dan tras catástrofes generadas por fenómenos naturales extremos como inundaciones, vendavales y deslizamientos de tierra.</p></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/index.html b/news/index.html index 5b84b70f..d8d60617 100644 --- a/news/index.html +++ b/news/index.html @@ -2,4 +2,4 @@ <link 
href=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css rel=stylesheet integrity=sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN crossorigin=anonymous><link rel=stylesheet href=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin=anonymous referrerpolicy=no-referrer><link rel="shortcut icon" href=http://isp.uv.es/favicon.ico type=image/x-icon><link rel=stylesheet href=/style/style.css><script src=/js/mode.js></script><script src=https://cdn.jsdelivr.net/npm/marked/marked.min.js></script></head></head><nav class="navbar navbar-expand-lg bg-body-tertiary fixed-top"><div class=container-fluid><a href=/ class="d-lg-none d-flex align-items-center a_logonav"><img src=/images/isp_logo_sinfondo.webp alt="ISP Icon" height=30 class=logo_nav> <span class="ms-2 text-isp">ISP</span> </a><button class="navbar-toggler ms-auto" type=button data-bs-toggle=collapse data-bs-target=#navbarTogglerDemo01 aria-controls=navbarTogglerDemo01 aria-expanded=false aria-label="Toggle navigation" style=height:40px> -<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/research/visual_neuroscience/>Visual 
neuroscience</a></li><li><a class="dropdown-item a" href=/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/news/>News</a></li><li class="nav-item px-2 
nav-item-highlight"><a class="nav-link a" aria-current=page href=/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.navbar 
.dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><div class=content-container style=font-size:14px><h1 style=margin-bottom:20px>News</h1><br><div id=news-container><h2 class=news-year>2023</h2><ul class=news-list><li class=news-item><a href=https://www.fundacioncanadablanch.org/actividades/inteligencia-artificial-para-sostenibilidad/ target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls, reconocido internacionalmente por sus contribuciones en Inteligencia Artificial, aprendizaje automático y su aplicación en las ciencias de la tierra y el clima, inicia un ciclo de conferencias sobre IA en la Fundación Cañada Blanch.</p></a></li><li class=news-item><a href=https://ellis.eu/news/3rd-elise-ellis-research-program-meeting-on-machine-learning-for-earth-and-climate-sciences-starts-in-valencia-today target=_blank rel="noopener noreferrer"><p class=news-summary>The UV and the ISP gathers in València (Spain) world-renowned experts in two 
interconnected fields: climate change and artificial intelligence. The meeting is organized by the ELLIS Program ‘Machine Learning for Earth and Climate Sciences’ and the ELISE project, which connect outstanding researchers in these fields across Europe. The discussions will focus on detecting, analyzing, and understanding extreme weather events, such as droughts, heat waves, floods, fires, and hurricanes, and applying artificial intelligence in this context.</p></a></li><li class=news-item><a href=https://www.levante-emv.com/comunitat-valenciana/2023/10/17/inteligencia-artificial-usada-gran-aliada-93417060.html target=_blank rel="noopener noreferrer"><p class=news-summary>El ISP organiza dos reuniones que reúnen expertos en fenómenos meteorológicos extremos y en la aplicación de algoritmos en València. Las reuniones estudian estrategias para la detección temprana, dentro de acciones en los proyectos europeos H2020 XAIDA y del programa de investigación en ELLIS “Machine Learning for Earth and Climate Science”, coordinados por Gustau Camps-Valls.</p></a></li><li class=news-item><a href=https://spaceref.com/newspace-and-tech/esa-egu-2023-excellence-award-winners/ target=_blank rel="noopener noreferrer"><p class=news-summary>The group led by Gustau Camps-Valls from the University of Valencia in Spain is finalist for the ESA-EGU 2023 team award, for their work on the development of novel Artificial Intelligence methods to analyse Earth observation data, with the goal of modelling and understanding the complex interactions between the various components of the Earth system.</p></a></li><li class=news-item><a href=https://twitter.com/DeepExtremes target=_blank rel="noopener noreferrer"><p class=news-summary>AI4CS - GVA PROMETEO 2022-2025 AI for complex systems: Brain, Earth, Climate, Society. We are hosting the first project meeting in person! The aim is to know each other better, find common points of interest and organize activities e.g. 
workshops, PhD students exchange, research designs, etc. Presentations by Marcelo Bertalmío from CSIC, Luca Martino from URJC, Sancho Salcedo from UAH, Adriano Camps from UPC, Luis Guanter from UPV, and Gustau Camps-Valls, followed by discussions about limitations on the standard model of vision science, imaging and AI, model and feature selection trade-offs, extreme event predictions and diverse AI-relevant Remote Sensing case studies.</p></a></li><li class=news-item><a href=https://aiforgood.itu.int/eventcat/discovery-ai-for-earth-and-sustainability-science/ target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls, Markus Reichstein, Joachim Denzler y Maria Piles coordinan el ciclo “AI for Earth and Sustainability Science” dentro de las acciones AI for Good de la ITU.</p></a></li><li class=news-item><a href=https://www.lavanguardia.com/local/valencia/20231215/9452147/ia-demuestra-nivel-socioeconomico-condiciona-migraciones-catastrofes-ambientales-agenciaslv20231215.html target=_blank rel="noopener noreferrer"><p class=news-summary>En un artículo en Nature Communications demostramos que la IA descubre que el nivel socioeconómico explica los movimientos poblacionales que se dan tras catástrofes generadas por fenómenos naturales extremos como inundaciones, vendavales y deslizamientos de tierra.</p></a></li></ul><h2 class=news-year>2022</h2><ul class=news-list><li class=news-item><a href=https://www.levante-emv.com/comunitat-valenciana/2022/11/16/cuatro-cientificos-universitat-valencia-elite-78655408.html target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls is a Highly Cited Researcher in 2022 again!</p></a></li><li class=news-item><a href=https://theconversation.com/la-inteligencia-artificial-es-solo-artificial-184207 target=_blank rel="noopener noreferrer"><p class=news-summary>El prof. 
Camps-Valls publica un artículo advirtiendo de las limitaciones de la IA actual, y aboga por incorporar conocimiento del dominio y las leyes de la Física, realizar un mayor esfuerzo en la explicabilidad de los modelos, y en la inferencia causal.</p></a></li><li class=news-item><a href=https://www.elise-ai.eu/sra-refresh/strategic-research-agenda-refresh target=_blank rel="noopener noreferrer"><p class=news-summary>The ISP participates in the ELISE project actively, and contributes to the ELISE vision for the next generation of AI for Europe. In particular, on ELISE’s Strategic Research Agenda and trends in AI!</p></a></li><li class=news-item><a href=https://twitter.com/DeepExtremes target=_blank rel="noopener noreferrer"><p class=news-summary>ESA 2022-23 AI4Science DeepExtremes Project Kicked-off! Multi-Hazards, Compounds and Cascade events. We will rely on deep learning to deal with spatio-temporal data, techniques from computer vision for forecasting impacts, and the advanced regression methods for associating impacts on biosphere and society. Understanding what the DL models have learned are of importance here: explainable AI techniques and methods from modern Bayesian inference. Follow us on Twitter DeepExtremes LeipzigValenciaJenaHamburg.</p></a></li><li class=news-item><a href=https://valenciaplaza.com/gustau-camps-valls-miembro-academia-europea-ciencias-europea target=_blank rel="noopener noreferrer"><p class=news-summary>El prof. Camps-Valls es un nuevo miembro de la Academia Europea de las Ciencias, y de la Academia Europaea.</p></a></li></ul><h2 class=news-year>2021</h2><ul class=news-list><li class=news-item><a href=../people target=_blank rel="noopener noreferrer"><p class=news-summary>ISP-IPL growing-up New people and projects. Welcome! Exciting research and synergies going on! 
Check out our collaborators and projects sections -ERC USMILE, H2020 DeepCube, H2020 iMIRACLI, Deepcloud, Leaves, Maloc… we explore the domians of land, atmosphere, ocean, computer vision and more!</p></a></li><li class=news-item><a href=https://www.lavanguardia.com/vida/20210520/7468342/uv-tendra-fondos-europeos-investigar-ia-sobre-cambio-climatico.html target=_blank rel="noopener noreferrer"><p class=news-summary>Kick-off September! Horizon 2020 EU XAIDA Extreme Events: Artificial Intelligence for detection and attribution- Climate Change studies. Stay tuned!</p></a></li><li class=news-item><a href=https://www.ox.ac.uk/news/2021-06-29-artificial-intelligence-pioneered-oxford-detect-floods-launches-space target=_blank rel="noopener noreferrer"><p class=news-summary>“Worldfloods” detection model onboard SpaceX’s Falcon rocket launched June 30th! Our script is now flying on a hard-disk at 20,000 km from the Earth! Successfully developed by ISP & Oxford University in partnership with ESA Φ-lab! Who made it possible? Gonzalo Mateo-Garcia, Joshua Veitch-Michaelis, Lewis Smith, Silviu Oprea, Guy Schumann, Yarin Gal, Atılım Güneş Baydin, and Dietmar Backes. 
Congrats!</p></a></li><li class=news-item><a href=https://www.lavanguardia.com/natural/cambio-climatico/20210415/6680759/inteligencia-artificial-veronica-nieves-subida-del-mar.html target=_blank rel="noopener noreferrer"><p class=news-summary>La inteligencia artificial llega a los océanos para desarrollar predicciones sobre el nivel del mar.</p></a></li><li class=news-item><a href="https://www.uv.es/uvweb/uv-noticias/ca/noticies/quatre-cientifics-una-cientifica-universitat-valencia-elit-mundial-nombre-cites-seus-articles-1285973304159/Novetat.html?id=1286227264446&plantilla=UV_Noticies/Page/TPGDetaillNews" target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls was named a Highly Cited Researcher 2021 by Clarivate for Academia and Government WoS in recognition of his contribution to AI for the geosciences and scientific enthusiasm! We are all happy of being part of this great team at ISP, participating in inspiring research projects and promoting international collaboration networks, Gustau. Congrats!</p></a></li></ul><h2 class=news-year>2020</h2><ul class=news-list><li class=news-item><a href="https://www.youtube.com/watch?v=EgBIbWuDs0Y" target=_blank rel="noopener noreferrer"><p class=news-summary>Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.</p></a></li><li class=news-item><a href=https://www.elise-ai.eu/ target=_blank rel="noopener noreferrer"><p class=news-summary>ISP Member of the European Network of Artificial Intelligence Excellence Centres ELISE.</p></a></li><li class=news-item><a href="http://www.uv.es/uvweb/college/en/news-release/water-hidden-driver-earth-s-carbon-cycle-1285846070123/Noticia.html?id=1285993414059" target=_blank rel="noopener noreferrer"><p class=news-summary>Prof. 
Gustau Camps-Valls was interviewed about the international research published in Nature, which provides a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks.</p></a></li><li class=news-item><a href=http://www.grss-ieee.org/education/distinguished-lecturers/ target=_blank rel="noopener noreferrer"><p class=news-summary>Prof. Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).</p></a></li><li class=news-item><a href=https://imiracli.web.ox.ac.uk/#/ target=_blank rel="noopener noreferrer"><p class=news-summary>Starting on January 2020, - Horizon-H2020 EU iMIRACLI Bringing together leading climate and machine learning experts across Europe to train a new next generation of climate data scientists.</p></a></li></ul><h2 class=news-year>2019</h2><ul class=news-list><li class=news-item><a href=../code/soft_regression target=_blank rel="noopener noreferrer"><p class=news-summary>Cool! Impressive progress in our toolboxes - Gaussian processes repository & many more. Give it a try! 
Feedback welcome.</p></a></li></ul><h2 class=news-year>2018</h2><ul class=news-list><li class=news-item><a href=./2018/jesus-malo-professorship/proyectoJMalo.pdf target=_blank rel="noopener noreferrer"><p class=news-summary>Jesús Malo got the full professorship in Vision Science at the Department of Physics in July 2018.</p></a></li><li class=news-item><a href=https://www.ieee.org/membership_services/membership/fellows/2018_elevated_fellows.pdf target=_blank rel="noopener noreferrer"><p class=news-summary>Professor Camps-Valls was elevated to the grade of IEEE Fellow by the Geoscience and Remote Sensing Society, and also recognized by the Signal Processing Society.</p></a></li><li class=news-item><a href="https://scholar.google.es/citations?hl=en&user=MsMYu-IAAAAJ" target=_blank rel="noopener noreferrer"><p class=news-summary>We reached an h-index of 60. Follow our research here!</p></a></li><li class=news-item><a href=../people target=_blank rel="noopener noreferrer"><p class=news-summary>The group is growing quite a lot! New people with lots of ideas, background and expertises. Welcome Jose Enrique, Jose Juan, Diego, Dan, Emiliano, and Emma! Check their profiles and exciting research here!</p></a></li></ul><h2 class=news-year>2017</h2><ul class=news-list><li class=news-item><a href=http://journal.frontiersin.org/journal/neuroscience/section/perception-science#editorial-board target=_blank rel="noopener noreferrer"><p class=news-summary>Prof. 
Jesús Malo has been appointed as Associate Editor of Frontiers in Neuroscience (IF = 3.4), section Perception Science (May 2017).</p></a></li><li class=news-item><a href="https://www.youtube.com/watch?v=EgBIbWuDs0Y" target=_blank rel="noopener noreferrer"><p class=news-summary>Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.</p></a></li><li class=news-item><a href="http://www.uv.es/uvweb/college/en/news-release/water-hidden-driver-earth-s-carbon-cycle-1285846070123/Noticia.html?id=1285993414059" target=_blank rel="noopener noreferrer"><p class=news-summary>An international research involving the Universitat de València just provided a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks. The results are published in Nature with Gustau Camps-Valls as co-author.</p></a></li><li class=news-item><a href=http://sites.ieee.org/spain-grss/ target=_blank rel="noopener noreferrer"><p class=news-summary>Maria Piles (president) and Gustau Camps-Valls (member) in the Executive Committee of the Spain Geoscience and Remote Sensing Chapter. Let’s do things to promote technology and science in remote sensing!</p></a></li><li class=news-item><a href=http://eu.wiley.com/WileyCDA/WileyTitle/productCd-1118611799.html target=_blank rel="noopener noreferrer"><p class=news-summary>We are excited about the publication of our new book, entitled “Digital Signal Processing with Kernel Methods”, by Rojo-Álvarez, J.L. and Martínez-Ramón, M. and Muñoz-Marí, J. and Camps-Valls, G., published by Wiley and sons.</p></a></li><li class=news-item><a href=https://scholar.googleblog.com/2017/06/classic-papers-articles-that-have-stood.html target=_blank rel="noopener noreferrer"><p class=news-summary>We are a classic! 
“Composite kernels for hyperspectral image classification” published in IEEE Geoscience and Remote Sensing Letters in 2006 is one of the highly-cited papers in its area of research, maintaining its impact long after publication.</p></a></li></ul><h2 class=news-year>2016</h2><ul class=news-list><li class=news-item><a href=./2016/sedal-grant/sedal/index.html target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls recently got an ERC consolidator grant (ERC-CoG) with the project “Statistical Learning for Remote Sensing Data Analysis” (SEDAL) for the period 2015-2020.</p></a></li><li class=news-item><a href=https://sites.google.com/site/mariapiles/ target=_blank rel="noopener noreferrer"><p class=news-summary>Dr. Maria Piles joins the ISP group through a prestigious “Ramón y Cajal” fellowship next January 2017. The ISP consolidates the research line of retrieval of soil moisture and vegetation biogeophysical parameters from space observations (microwave radiometers, radars and hyperspectral imagers). 
Welcome Maria!</p></a></li><li class=news-item><a href=../courses target=_blank rel="noopener noreferrer"><p class=news-summary>We published learning material on geoscience and remote sensing data processing in IEEE GRSS training material and many more online available!</p></a></li><li class=news-item><a href=http://www.plosone.org/ target=_blank rel="noopener noreferrer"><p class=news-summary>Jesús Malo has been appointed as Academic Editor at PLoS ONE for the period 2014-2017.</p></a></li><li class=news-item><a href=https://www.igarss2018.org/ target=_blank rel="noopener noreferrer"><p class=news-summary>We organized here in València the biggest IEEE remote sensing and geoscience conference, IGARSS, in 2018.</p></a></li><li class=news-item><a href=http://www.classic.grss-ieee.org/community/technical-committees/data-fusion/2015-ieee-grss-data-fusion-contest-results/ target=_blank rel="noopener noreferrer"><p class=news-summary>We won the 2015 IEEE GRSS Data Fusion Contest with a paper on the statistical analysis of the optical and LiDAR complementary information.</p></a></li><li class=news-item><a href=http://www.grss-ieee.org/education/distinguished-lecturers/ target=_blank rel="noopener noreferrer"><p class=news-summary>Prof. 
Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).</p></a></li><li class=news-item><a href=http://www.springer.com/engineering/electronics/journal/11220 target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls co-edits a Special Issue titled ‘Hyperspectral Imaging and Image Processing’ for the journal Sensing and Imaging (Springer).</p></a></li><li class=news-item><a href="http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=6928548&tag=1" target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls co-edits a Special Issue on the IEEE Geoscience and Remote Sensing Magazine (GRSM).</p></a></li><li class=news-item><a href="https://www.uv.es/uvweb/master-teledeteccio/ca/master-universitari-teledeteccio/google-earth-engine-award-1285883190980/Novetat.html?id=1285945565608" target=_blank rel="noopener noreferrer"><p class=news-summary>Luis Gómez-Chova received a Google Earth Engine Research Award 2015 to foster research in cloud detection.</p></a></li><li class=news-item><a href="https://www.uv.es/uvweb/master-teledeteccio/ca/master-universitari-teledeteccio/google-earth-engine-award-1285883190980/Novetat.html?id=1285945565608" target=_blank rel="noopener noreferrer"><p class=news-summary>Luis Gómez-Chova received a Google Earth Engine Research Award 2015 to foster research in cloud detection.</p></a></li><li class=news-item><a href=https://sedalproject.wordpress.com/ target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls recently got an ERC consolidator grant (ERC-CoG) with the project “Statistical Learning for Remote Sensing Data Analysis” (SEDAL) for the period 2015-2020.</p></a></li><li class=news-item><a href="http://www.dagstuhl.de/de/programm/kalender/semhp/?semnr=17091" target=_blank rel="noopener noreferrer"><p class=news-summary>We organized a Dagstuhl seminar on “Computer Science meets Ecology” gathering the best 
researchers of both worlds in a unique environment for fruitful discussions.</p></a></li><li class=news-item><a href="http://www.dagstuhl.de/de/programm/kalender/semhp/?semnr=17091" target=_blank rel="noopener noreferrer"><p class=news-summary>Dagstuhl seminar on “Computer Science meets Ecology” gathering the best researchers of both worlds in a unique environment for fruitful discussions.</p></a></li><li class=news-item><a href=http://www.pcuv.es/es/news-room/noticias-destacadas/2016/analog target=_blank rel="noopener noreferrer"><p class=news-summary>Analog Devices moves to the Science Park of the UV. New facilities and closer collaborations!</p></a></li><li class=news-item><a href=http://www.pcuv.es/es/news-room/noticias-destacadas/2016/analog target=_blank rel="noopener noreferrer"><p class=news-summary>Analog Devices moves to the Science Park of the UV. New facilities and closer collaborations. Welcome back, Javi!</p></a></li></ul></div></div></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file +<span class=navbar-toggler-icon></span></button><div class="collapse navbar-collapse" id=navbarTogglerDemo01><ul class="navbar-nav mx-auto mb-lg-0"><li class="nav-item px-2 nav-item-highlight d-none d-lg-block"><a class="nav-link a" aria-current=page href=/github/>ISP</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/people/>People</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/research/ id=navbarDropdownResearch role=button aria-expanded=false>Research</a><ul class=dropdown-menu 
aria-labelledby=navbarDropdownResearch><li><a class="dropdown-item a" href=/github/research/machine_learning/>Machine learning</a></li><li><a class="dropdown-item a" href=/github/research/visual_neuroscience/>Visual neuroscience</a></li><li><a class="dropdown-item a" href=/github/research/visual_brain/>Visual brain</a></li><li><a class="dropdown-item a" href=/github/research/earth_science/>Earth science</a></li><li><a class="dropdown-item a" href=/github/research/social_science>Social science</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/projects/>Projects</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/facilities/>Facilities</a></li><li class="nav-item dropdown px-2 nav-item-highlight"><a class="nav-link dropdown-toggle a" href=/github/publications/journals/ id=navbarDropdownPublications role=button aria-expanded=false>Publications</a><ul class=dropdown-menu aria-labelledby=navbarDropdownPublications><li><a class="dropdown-item a" href=/github/publications/journals/>Journals</a></li><li><a class="dropdown-item a" href=/github/publications/conferences/>Conferences</a></li><li><a class="dropdown-item a" href=/github/publications/books/>Books</a></li><li><a class="dropdown-item a" href=/github/publications/talks/>Talks</a></li><li><a class="dropdown-item a" href=/github/publications/technical_reports/>Technical Reports</a></li><li><a class="dropdown-item a" href=/github/publications/theses/>Theses</a></li></ul></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/code/>Code</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/data/>Data</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/seminars/>Seminars</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" 
aria-current=page href=/github/courses/>Courses</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/collaborators/>Collaborators</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/news/>News</a></li><li class="nav-item px-2 nav-item-highlight"><a class="nav-link a" aria-current=page href=/github/contact/>Contact</a></li></ul></div></div></nav><style>.navbar{--bs-navbar-padding-y:0rem !important;background-color:#222!important}.a{color:#949494!important}.a:hover{color:#fff!important}.nav-item-highlight{padding:.4rem}.nav-item-highlight:hover{background-color:#2d70aa!important}.dropdown-menu{background-color:#333!important;color:#fff!important;display:block}.dropdown-item{color:#949494!important}.dropdown-item:hover{background-color:#2d70aa!important;color:#fff!important}.navbar-nav li.nav-item .nav-link,.dropdown-menu .dropdown-item{transition:color .3s,background-color .3s}.nav-link.active,.dropdown-item.active{color:#fff!important;background-color:#007bff!important}.navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='white' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-toggler{border:none}.navbar-toggler:focus{outline:none}@media(min-width:992px){.dropdown-menu{display:none}.dropdown:hover .dropdown-menu{display:block}}@media(max-width:991px){.navbar-nav{max-height:calc(100vh - 56px);overflow-y:auto}.navbar-nav::-webkit-scrollbar{width:9px}.navbar-nav::-webkit-scrollbar-thumb{background-color:#888;border-radius:10px}.navbar-nav::-webkit-scrollbar-thumb:hover{background-color:#555}.navbar-nav .nav-link{font-size:1.2rem}.dropdown-menu .dropdown-item{font-size:1rem}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid 
transparent;border-bottom:0;border-left:.3em solid transparent}.navbar .dropdown-toggle::after{content:none!important}.navbar-toggler-icon{width:1.2em;height:1.2em}.navbar-toggler{border:none!important}.navbar-toggler:focus{box-shadow:none!important}.d-flex{height:45px}.a_logonav{text-decoration:none!important}.text-isp{font-size:1.3rem;color:#9d9d9d;display:inline-block;vertical-align:middle;text-decoration:none!important}.navbar-nav{max-height:calc(50vh - 56px);overflow-y:auto}}</style><script>document.addEventListener("DOMContentLoaded",function(){let e=document.querySelectorAll(".navbar .dropdown");e.forEach(function(e){let t=e.querySelector(".dropdown-toggle");t.addEventListener("click",function(e){window.innerWidth<992&&e.target===t&&(window.location.href=t.href)});let n=e.querySelectorAll(".dropdown-item");n.forEach(function(e){e.addEventListener("click",function(){window.location.href=e.href})})}),document.addEventListener("click",function(t){window.innerWidth<992&&!t.target.closest(".navbar .dropdown")&&e.forEach(function(e){e.querySelector(".dropdown-menu").classList.remove("show")})})})</script><main><div class=container><div class=content-container style=font-size:14px><h1 style=margin-bottom:20px>News</h1><br><div id=news-container><h2 class=news-year>2023</h2><ul class=news-list><li class=news-item><a href=https://www.fundacioncanadablanch.org/actividades/inteligencia-artificial-para-sostenibilidad/ target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls, reconocido internacionalmente por sus contribuciones en Inteligencia Artificial, aprendizaje automático y su aplicación en las ciencias de la tierra y el clima, inicia un ciclo de conferencias sobre IA en la Fundación Cañada Blanch.</p></a></li><li class=news-item><a href=https://ellis.eu/news/3rd-elise-ellis-research-program-meeting-on-machine-learning-for-earth-and-climate-sciences-starts-in-valencia-today target=_blank rel="noopener noreferrer"><p class=news-summary>The UV 
and the ISP gathers in València (Spain) world-renowned experts in two interconnected fields: climate change and artificial intelligence. The meeting is organized by the ELLIS Program ‘Machine Learning for Earth and Climate Sciences’ and the ELISE project, which connect outstanding researchers in these fields across Europe. The discussions will focus on detecting, analyzing, and understanding extreme weather events, such as droughts, heat waves, floods, fires, and hurricanes, and applying artificial intelligence in this context.</p></a></li><li class=news-item><a href=https://www.levante-emv.com/comunitat-valenciana/2023/10/17/inteligencia-artificial-usada-gran-aliada-93417060.html target=_blank rel="noopener noreferrer"><p class=news-summary>El ISP organiza dos reuniones que reúnen expertos en fenómenos meteorológicos extremos y en la aplicación de algoritmos en València. Las reuniones estudian estrategias para la detección temprana, dentro de acciones en los proyectos europeos H2020 XAIDA y del programa de investigación en ELLIS “Machine Learning for Earth and Climate Science”, coordinados por Gustau Camps-Valls.</p></a></li><li class=news-item><a href=https://spaceref.com/newspace-and-tech/esa-egu-2023-excellence-award-winners/ target=_blank rel="noopener noreferrer"><p class=news-summary>The group led by Gustau Camps-Valls from the University of Valencia in Spain is finalist for the ESA-EGU 2023 team award, for their work on the development of novel Artificial Intelligence methods to analyse Earth observation data, with the goal of modelling and understanding the complex interactions between the various components of the Earth system.</p></a></li><li class=news-item><a href=https://twitter.com/DeepExtremes target=_blank rel="noopener noreferrer"><p class=news-summary>AI4CS - GVA PROMETEO 2022-2025 AI for complex systems: Brain, Earth, Climate, Society. We are hosting the first project meeting in person! 
The aim is to know each other better, find common points of interest and organize activities e.g. workshops, PhD students exchange, research designs, etc. Presentations by Marcelo Bertalmío from CSIC, Luca Martino from URJC, Sancho Salcedo from UAH, Adriano Camps from UPC, Luis Guanter from UPV, and Gustau Camps-Valls, followed by discussions about limitations on the standard model of vision science, imaging and AI, model and feature selection trade-offs, extreme event predictions and diverse AI-relevant Remote Sensing case studies.</p></a></li><li class=news-item><a href=https://aiforgood.itu.int/eventcat/discovery-ai-for-earth-and-sustainability-science/ target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls, Markus Reichstein, Joachim Denzler y Maria Piles coordinan el ciclo “AI for Earth and Sustainability Science” dentro de las acciones AI for Good de la ITU.</p></a></li><li class=news-item><a href=https://www.lavanguardia.com/local/valencia/20231215/9452147/ia-demuestra-nivel-socioeconomico-condiciona-migraciones-catastrofes-ambientales-agenciaslv20231215.html target=_blank rel="noopener noreferrer"><p class=news-summary>En un artículo en Nature Communications demostramos que la IA descubre que el nivel socioeconómico explica los movimientos poblacionales que se dan tras catástrofes generadas por fenómenos naturales extremos como inundaciones, vendavales y deslizamientos de tierra.</p></a></li></ul><h2 class=news-year>2022</h2><ul class=news-list><li class=news-item><a href=https://www.levante-emv.com/comunitat-valenciana/2022/11/16/cuatro-cientificos-universitat-valencia-elite-78655408.html target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls is a Highly Cited Researcher in 2022 again!</p></a></li><li class=news-item><a href=https://theconversation.com/la-inteligencia-artificial-es-solo-artificial-184207 target=_blank rel="noopener noreferrer"><p class=news-summary>El prof. 
Camps-Valls publica un artículo advirtiendo de las limitaciones de la IA actual, y aboga por incorporar conocimiento del dominio y las leyes de la Física, realizar un mayor esfuerzo en la explicabilidad de los modelos, y en la inferencia causal.</p></a></li><li class=news-item><a href=https://www.elise-ai.eu/sra-refresh/strategic-research-agenda-refresh target=_blank rel="noopener noreferrer"><p class=news-summary>The ISP participates in the ELISE project actively, and contributes to the ELISE vision for the next generation of AI for Europe. In particular, on ELISE’s Strategic Research Agenda and trends in AI!</p></a></li><li class=news-item><a href=https://twitter.com/DeepExtremes target=_blank rel="noopener noreferrer"><p class=news-summary>ESA 2022-23 AI4Science DeepExtremes Project Kicked-off! Multi-Hazards, Compounds and Cascade events. We will rely on deep learning to deal with spatio-temporal data, techniques from computer vision for forecasting impacts, and the advanced regression methods for associating impacts on biosphere and society. Understanding what the DL models have learned is of importance here: explainable AI techniques and methods from modern Bayesian inference. Follow us on Twitter DeepExtremes LeipzigValenciaJenaHamburg.</p></a></li><li class=news-item><a href=https://valenciaplaza.com/gustau-camps-valls-miembro-academia-europea-ciencias-europea target=_blank rel="noopener noreferrer"><p class=news-summary>El prof. Camps-Valls es un nuevo miembro de la Academia Europea de las Ciencias, y de la Academia Europaea.</p></a></li></ul><h2 class=news-year>2021</h2><ul class=news-list><li class=news-item><a href=../people target=_blank rel="noopener noreferrer"><p class=news-summary>ISP-IPL growing-up New people and projects. Welcome! Exciting research and synergies going on! 
Check out our collaborators and projects sections -ERC USMILE, H2020 DeepCube, H2020 iMIRACLI, Deepcloud, Leaves, Maloc… we explore the domains of land, atmosphere, ocean, computer vision and more!</p></a></li><li class=news-item><a href=https://www.lavanguardia.com/vida/20210520/7468342/uv-tendra-fondos-europeos-investigar-ia-sobre-cambio-climatico.html target=_blank rel="noopener noreferrer"><p class=news-summary>Kick-off September! Horizon 2020 EU XAIDA Extreme Events: Artificial Intelligence for detection and attribution- Climate Change studies. Stay tuned!</p></a></li><li class=news-item><a href=https://www.ox.ac.uk/news/2021-06-29-artificial-intelligence-pioneered-oxford-detect-floods-launches-space target=_blank rel="noopener noreferrer"><p class=news-summary>“Worldfloods” detection model onboard SpaceX’s Falcon rocket launched June 30th! Our script is now flying on a hard-disk at 20,000 km from the Earth! Successfully developed by ISP & Oxford University in partnership with ESA Φ-lab! Who made it possible? Gonzalo Mateo-Garcia, Joshua Veitch-Michaelis, Lewis Smith, Silviu Oprea, Guy Schumann, Yarin Gal, Atılım Güneş Baydin, and Dietmar Backes. 
Congrats!</p></a></li><li class=news-item><a href=https://www.lavanguardia.com/natural/cambio-climatico/20210415/6680759/inteligencia-artificial-veronica-nieves-subida-del-mar.html target=_blank rel="noopener noreferrer"><p class=news-summary>La inteligencia artificial llega a los océanos para desarrollar predicciones sobre el nivel del mar.</p></a></li><li class=news-item><a href="https://www.uv.es/uvweb/uv-noticias/ca/noticies/quatre-cientifics-una-cientifica-universitat-valencia-elit-mundial-nombre-cites-seus-articles-1285973304159/Novetat.html?id=1286227264446&plantilla=UV_Noticies/Page/TPGDetaillNews" target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls was named a Highly Cited Researcher 2021 by Clarivate for Academia and Government WoS in recognition of his contribution to AI for the geosciences and scientific enthusiasm! We are all happy of being part of this great team at ISP, participating in inspiring research projects and promoting international collaboration networks, Gustau. Congrats!</p></a></li></ul><h2 class=news-year>2020</h2><ul class=news-list><li class=news-item><a href="https://www.youtube.com/watch?v=EgBIbWuDs0Y" target=_blank rel="noopener noreferrer"><p class=news-summary>Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.</p></a></li><li class=news-item><a href=https://www.elise-ai.eu/ target=_blank rel="noopener noreferrer"><p class=news-summary>ISP Member of the European Network of Artificial Intelligence Excellence Centres ELISE.</p></a></li><li class=news-item><a href="http://www.uv.es/uvweb/college/en/news-release/water-hidden-driver-earth-s-carbon-cycle-1285846070123/Noticia.html?id=1285993414059" target=_blank rel="noopener noreferrer"><p class=news-summary>Prof. 
Gustau Camps-Valls was interviewed about the international research published in Nature, which provides a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks.</p></a></li><li class=news-item><a href=http://www.grss-ieee.org/education/distinguished-lecturers/ target=_blank rel="noopener noreferrer"><p class=news-summary>Prof. Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).</p></a></li><li class=news-item><a href=https://imiracli.web.ox.ac.uk/#/ target=_blank rel="noopener noreferrer"><p class=news-summary>Starting on January 2020, - Horizon-H2020 EU iMIRACLI Bringing together leading climate and machine learning experts across Europe to train a new generation of climate data scientists.</p></a></li></ul><h2 class=news-year>2019</h2><ul class=news-list><li class=news-item><a href=../code/soft_regression target=_blank rel="noopener noreferrer"><p class=news-summary>Cool! Impressive progress in our toolboxes - Gaussian processes repository & many more. Give it a try! 
Feedback welcome.</p></a></li></ul><h2 class=news-year>2018</h2><ul class=news-list><li class=news-item><a href=./2018/jesus-malo-professorship/proyectoJMalo.pdf target=_blank rel="noopener noreferrer"><p class=news-summary>Jesús Malo got the full professorship in Vision Science at the Department of Physics in July 2018.</p></a></li><li class=news-item><a href=https://www.ieee.org/membership_services/membership/fellows/2018_elevated_fellows.pdf target=_blank rel="noopener noreferrer"><p class=news-summary>Professor Camps-Valls was elevated to the grade of IEEE Fellow by the Geoscience and Remote Sensing Society, and also recognized by the Signal Processing Society.</p></a></li><li class=news-item><a href="https://scholar.google.es/citations?hl=en&user=MsMYu-IAAAAJ" target=_blank rel="noopener noreferrer"><p class=news-summary>We reached an h-index of 60. Follow our research here!</p></a></li><li class=news-item><a href=../people target=_blank rel="noopener noreferrer"><p class=news-summary>The group is growing quite a lot! New people with lots of ideas, background and expertise. Welcome Jose Enrique, Jose Juan, Diego, Dan, Emiliano, and Emma! Check their profiles and exciting research here!</p></a></li></ul><h2 class=news-year>2017</h2><ul class=news-list><li class=news-item><a href=http://journal.frontiersin.org/journal/neuroscience/section/perception-science#editorial-board target=_blank rel="noopener noreferrer"><p class=news-summary>Prof. 
Jesús Malo has been appointed as Associate Editor of Frontiers in Neuroscience (IF = 3.4), section Perception Science (May 2017).</p></a></li><li class=news-item><a href="https://www.youtube.com/watch?v=EgBIbWuDs0Y" target=_blank rel="noopener noreferrer"><p class=news-summary>Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.</p></a></li><li class=news-item><a href="http://www.uv.es/uvweb/college/en/news-release/water-hidden-driver-earth-s-carbon-cycle-1285846070123/Noticia.html?id=1285993414059" target=_blank rel="noopener noreferrer"><p class=news-summary>An international research involving the Universitat de València just provided a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks. The results are published in Nature with Gustau Camps-Valls as co-author.</p></a></li><li class=news-item><a href=http://sites.ieee.org/spain-grss/ target=_blank rel="noopener noreferrer"><p class=news-summary>Maria Piles (president) and Gustau Camps-Valls (member) in the Executive Committee of the Spain Geoscience and Remote Sensing Chapter. Let’s do things to promote technology and science in remote sensing!</p></a></li><li class=news-item><a href=http://eu.wiley.com/WileyCDA/WileyTitle/productCd-1118611799.html target=_blank rel="noopener noreferrer"><p class=news-summary>We are excited about the publication of our new book, entitled “Digital Signal Processing with Kernel Methods”, by Rojo-Álvarez, J.L. and Martínez-Ramón, M. and Muñoz-Marí, J. and Camps-Valls, G., published by Wiley and sons.</p></a></li><li class=news-item><a href=https://scholar.googleblog.com/2017/06/classic-papers-articles-that-have-stood.html target=_blank rel="noopener noreferrer"><p class=news-summary>We are a classic! 
“Composite kernels for hyperspectral image classification” published in IEEE Geoscience and Remote Sensing Letters in 2006 is one of the highly-cited papers in its area of research, maintaining its impact long after publication.</p></a></li></ul><h2 class=news-year>2016</h2><ul class=news-list><li class=news-item><a href=./2016/sedal-grant/sedal/index.html target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls recently got an ERC consolidator grant (ERC-CoG) with the project “Statistical Learning for Remote Sensing Data Analysis” (SEDAL) for the period 2015-2020.</p></a></li><li class=news-item><a href=https://sites.google.com/site/mariapiles/ target=_blank rel="noopener noreferrer"><p class=news-summary>Dr. Maria Piles joins the ISP group through a prestigious “Ramón y Cajal” fellowship next January 2017. The ISP consolidates the research line of retrieval of soil moisture and vegetation biogeophysical parameters from space observations (microwave radiometers, radars and hyperspectral imagers). 
Welcome Maria!</p></a></li><li class=news-item><a href=../courses target=_blank rel="noopener noreferrer"><p class=news-summary>We published learning material on geoscience and remote sensing data processing in IEEE GRSS training material and many more online available!</p></a></li><li class=news-item><a href=http://www.plosone.org/ target=_blank rel="noopener noreferrer"><p class=news-summary>Jesús Malo has been appointed as Academic Editor at PLoS ONE for the period 2014-2017.</p></a></li><li class=news-item><a href=https://www.igarss2018.org/ target=_blank rel="noopener noreferrer"><p class=news-summary>We organized here in València the biggest IEEE remote sensing and geoscience conference, IGARSS, in 2018.</p></a></li><li class=news-item><a href=http://www.classic.grss-ieee.org/community/technical-committees/data-fusion/2015-ieee-grss-data-fusion-contest-results/ target=_blank rel="noopener noreferrer"><p class=news-summary>We won the 2015 IEEE GRSS Data Fusion Contest with a paper on the statistical analysis of the optical and LiDAR complementary information.</p></a></li><li class=news-item><a href=http://www.grss-ieee.org/education/distinguished-lecturers/ target=_blank rel="noopener noreferrer"><p class=news-summary>Prof. 
Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).</p></a></li><li class=news-item><a href=http://www.springer.com/engineering/electronics/journal/11220 target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls co-edits a Special Issue titled ‘Hyperspectral Imaging and Image Processing’ for the journal Sensing and Imaging (Springer).</p></a></li><li class=news-item><a href="http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=6928548&tag=1" target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls co-edits a Special Issue on the IEEE Geoscience and Remote Sensing Magazine (GRSM).</p></a></li><li class=news-item><a href="https://www.uv.es/uvweb/master-teledeteccio/ca/master-universitari-teledeteccio/google-earth-engine-award-1285883190980/Novetat.html?id=1285945565608" target=_blank rel="noopener noreferrer"><p class=news-summary>Luis Gómez-Chova received a Google Earth Engine Research Award 2015 to foster research in cloud detection.</p></a></li><li class=news-item><a href="https://www.uv.es/uvweb/master-teledeteccio/ca/master-universitari-teledeteccio/google-earth-engine-award-1285883190980/Novetat.html?id=1285945565608" target=_blank rel="noopener noreferrer"><p class=news-summary>Luis Gómez-Chova received a Google Earth Engine Research Award 2015 to foster research in cloud detection.</p></a></li><li class=news-item><a href=https://sedalproject.wordpress.com/ target=_blank rel="noopener noreferrer"><p class=news-summary>Gustau Camps-Valls recently got an ERC consolidator grant (ERC-CoG) with the project “Statistical Learning for Remote Sensing Data Analysis” (SEDAL) for the period 2015-2020.</p></a></li><li class=news-item><a href="http://www.dagstuhl.de/de/programm/kalender/semhp/?semnr=17091" target=_blank rel="noopener noreferrer"><p class=news-summary>We organized a Dagstuhl seminar on “Computer Science meets Ecology” gathering the best 
researchers of both worlds in a unique environment for fruitful discussions.</p></a></li><li class=news-item><a href="http://www.dagstuhl.de/de/programm/kalender/semhp/?semnr=17091" target=_blank rel="noopener noreferrer"><p class=news-summary>Dagstuhl seminar on “Computer Science meets Ecology” gathering the best researchers of both worlds in a unique environment for fruitful discussions.</p></a></li><li class=news-item><a href=http://www.pcuv.es/es/news-room/noticias-destacadas/2016/analog target=_blank rel="noopener noreferrer"><p class=news-summary>Analog Devices moves to the Science Park of the UV. New facilities and closer collaborations!</p></a></li><li class=news-item><a href=http://www.pcuv.es/es/news-room/noticias-destacadas/2016/analog target=_blank rel="noopener noreferrer"><p class=news-summary>Analog Devices moves to the Science Park of the UV. New facilities and closer collaborations. Welcome back, Javi!</p></a></li></ul></div></div></div></main></body><script src=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js integrity=sha384-C6RzsynM9kWDrMNeT87bh95OGNyZPhcTNXj1NW7RuBCsyN/o0jlpcV8Qyq46cDfL crossorigin=anonymous defer></script><script type=text/javascript src=https://cdn.jsdelivr.net/gh/pcooksey/bibtex-js@1.0.0/src/bibtex_js.min.js defer></script></html> \ No newline at end of file diff --git a/news/index.xml b/news/index.xml index e994bfe3..e7d9574a 100644 --- a/news/index.xml +++ b/news/index.xml @@ -1 +1 @@ -<?xml version="1.0" encoding="utf-8" standalone="yes"?><rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom"><channel><title>News on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/news/Recent content in News on ISP - Image and Signal Processing groupHugoen-usSat, 01 Jul 2023 00:00:00 +0000Conferencias sobre IA en Fundación Cañada Blanchhttps://ipl-uv.github.io/news/2023/fundacion-canada-blanch/Sat, 01 Jul 2023 00:00:00 +0000https://ipl-uv.github.io/news/2023/fundacion-canada-blanch/Gustau 
Camps-Valls, reconocido internacionalmente por sus contribuciones en Inteligencia Artificial, aprendizaje automático y su aplicación en las ciencias de la tierra y el clima, inicia un ciclo de conferencias sobre IA en la Fundación Cañada Blanch.ELLIS Meeting in Valenciahttps://ipl-uv.github.io/news/2023/ellis-meeting/Thu, 01 Jun 2023 00:00:00 +0000https://ipl-uv.github.io/news/2023/ellis-meeting/The UV and the ISP gathers in València (Spain) world-renowned experts in two interconnected fields: climate change and artificial intelligence. The meeting is organized by the ELLIS Program ‘Machine Learning for Earth and Climate Sciences’ and the ELISE project, which connect outstanding researchers in these fields across Europe. The discussions will focus on detecting, analyzing, and understanding extreme weather events, such as droughts, heat waves, floods, fires, and hurricanes, and applying artificial intelligence in this context.ISP organiza reuniones sobre fenómenos meteorológicos extremoshttps://ipl-uv.github.io/news/2023/isp-reuniones/Mon, 01 May 2023 00:00:00 +0000https://ipl-uv.github.io/news/2023/isp-reuniones/El ISP organiza dos reuniones que reúnen expertos en fenómenos meteorológicos extremos y en la aplicación de algoritmos en València. 
Las reuniones estudian estrategias para la detección temprana, dentro de acciones en los proyectos europeos H2020 XAIDA y del programa de investigación en ELLIS &ldquo;Machine Learning for Earth and Climate Science&rdquo;, coordinados por Gustau Camps-Valls.Finalists for ESA-EGU 2023https://ipl-uv.github.io/news/2023/esa-egu-2023/Sat, 01 Apr 2023 00:00:00 +0000https://ipl-uv.github.io/news/2023/esa-egu-2023/The group led by Gustau Camps-Valls from the University of Valencia in Spain is finalist for the ESA-EGU 2023 team award, for their work on the development of novel Artificial Intelligence methods to analyse Earth observation data, with the goal of modelling and understanding the complex interactions between the various components of the Earth system.AI4CS - GVA PROMETEO 2022-2025https://ipl-uv.github.io/news/2023/ai4cs-prometeo/Wed, 01 Mar 2023 00:00:00 +0000https://ipl-uv.github.io/news/2023/ai4cs-prometeo/AI4CS - GVA PROMETEO 2022-2025 AI for complex systems: Brain, Earth, Climate, Society. We are hosting the first project meeting in person! The aim is to know each other better, find common points of interest and organize activities e.g. workshops, PhD students exchange, research designs, etc. 
Presentations by Marcelo Bertalmío from CSIC, Luca Martino from URJC, Sancho Salcedo from UAH, Adriano Camps from UPC, Luis Guanter from UPV, and Gustau Camps-Valls, followed by discussions about limitations on the standard model of vision science, imaging and AI, model and feature selection trade-offs, extreme event predictions and diverse AI-relevant Remote Sensing case studies.AI for Earth and Sustainability Sciencehttps://ipl-uv.github.io/news/2023/ai-for-earth/Wed, 01 Feb 2023 00:00:00 +0000https://ipl-uv.github.io/news/2023/ai-for-earth/Gustau Camps-Valls, Markus Reichstein, Joachim Denzler y Maria Piles coordinan el ciclo &ldquo;AI for Earth and Sustainability Science&rdquo; dentro de las acciones AI for Good de la ITU.En un artículo en Nature Communicationshttps://ipl-uv.github.io/news/2023/nature-communications-ia/Sun, 01 Jan 2023 00:00:00 +0000https://ipl-uv.github.io/news/2023/nature-communications-ia/En un artículo en Nature Communications demostramos que la IA descubre que el nivel socioeconómico explica los movimientos poblacionales que se dan tras catástrofes generadas por fenómenos naturales extremos como inundaciones, vendavales y deslizamientos de tierra.Highly Cited Researcher 2022https://ipl-uv.github.io/news/2022/highly-cited-researcher/Sun, 01 May 2022 00:00:00 +0000https://ipl-uv.github.io/news/2022/highly-cited-researcher/Gustau Camps-Valls is a Highly Cited Researcher in 2022 again!Artículo sobre las limitaciones de la IAhttps://ipl-uv.github.io/news/2022/ia-limitations/Fri, 01 Apr 2022 00:00:00 +0000https://ipl-uv.github.io/news/2022/ia-limitations/El prof. 
Camps-Valls publica un artículo advirtiendo de las limitaciones de la IA actual, y aboga por incorporar conocimiento del dominio y las leyes de la Física, realizar un mayor esfuerzo en la explicabilidad de los modelos, y en la inferencia causal.Participation in ELISE projecthttps://ipl-uv.github.io/news/2022/elise-project/Tue, 01 Mar 2022 00:00:00 +0000https://ipl-uv.github.io/news/2022/elise-project/The ISP participates in the ELISE project actively, and contributes to the ELISE vision for the next generation of AI for Europe. In particular, on ELISE’s Strategic Research Agenda and trends in AI!AI4Science DeepExtremes Project Kicked-offhttps://ipl-uv.github.io/news/2022/ai4science-deepextremes/Tue, 01 Feb 2022 00:00:00 +0000https://ipl-uv.github.io/news/2022/ai4science-deepextremes/ESA 2022-23 AI4Science DeepExtremes Project Kicked-off! Multi-Hazards, Compounds and Cascade events. We will rely on deep learning to deal with spatio-temporal data, techniques from computer vision for forecasting impacts, and the advanced regression methods for associating impacts on biosphere and society. Understanding what the DL models have learned are of importance here: explainable AI techniques and methods from modern Bayesian inference. Follow us on Twitter DeepExtremes LeipzigValenciaJenaHamburg.Nuevo miembro de la Academia Europea de Cienciashttps://ipl-uv.github.io/news/2022/nuevo-miembro-academia/Sat, 01 Jan 2022 00:00:00 +0000https://ipl-uv.github.io/news/2022/nuevo-miembro-academia/El prof. Camps-Valls es un nuevo miembro de la Academia Europea de las Ciencias, y de la Academia Europaea.Horizon 2020 EU XAIDA Kick-offhttps://ipl-uv.github.io/news/2021/horizon-2020-xaida/Wed, 01 Sep 2021 00:00:00 +0000https://ipl-uv.github.io/news/2021/horizon-2020-xaida/Kick-off September! Horizon 2020 EU XAIDA Extreme Events: Artificial Intelligence for detection and attribution- Climate Change studies. 
Stay tuned!ISP-IPL Growing Uphttps://ipl-uv.github.io/news/2021/isp-ipl-growing-up/Wed, 01 Sep 2021 00:00:00 +0000https://ipl-uv.github.io/news/2021/isp-ipl-growing-up/ISP-IPL growing-up New people and projects. Welcome! Exciting research and synergies going on! Check out our collaborators and projects sections -ERC USMILE, H2020 DeepCube, H2020 iMIRACLI, Deepcloud, Leaves, Maloc&hellip; we explore the domians of land, atmosphere, ocean, computer vision and more!Worldfloods Detection Modelhttps://ipl-uv.github.io/news/2021/worldfloods-detection/Wed, 30 Jun 2021 00:00:00 +0000https://ipl-uv.github.io/news/2021/worldfloods-detection/&ldquo;Worldfloods&rdquo; detection model onboard SpaceX’s Falcon rocket launched June 30th! Our script is now flying on a hard-disk at 20,000 km from the Earth! Successfully developed by ISP &amp; Oxford University in partnership with ESA Φ-lab! Who made it possible? Gonzalo Mateo-Garcia, Joshua Veitch-Michaelis, Lewis Smith, Silviu Oprea, Guy Schumann, Yarin Gal, Atılım Güneş Baydin, and Dietmar Backes. Congrats!AI for Sea Level Predictionshttps://ipl-uv.github.io/news/2021/ai-oceans-sea-level/Thu, 15 Apr 2021 00:00:00 +0000https://ipl-uv.github.io/news/2021/ai-oceans-sea-level/La inteligencia artificial llega a los océanos para desarrollar predicciones sobre el nivel del mar.Highly Cited Researcher 2021https://ipl-uv.github.io/news/2021/highly-cited-researcher-2021/Fri, 01 Jan 2021 00:00:00 +0000https://ipl-uv.github.io/news/2021/highly-cited-researcher-2021/Gustau Camps-Valls was named a Highly Cited Researcher 2021 by Clarivate for Academia and Government WoS in recognition of his contribution to AI for the geosciences and scientific enthusiasm! We are all happy of being part of this great team at ISP, participating in inspiring research projects and promoting international collaboration networks, Gustau. 
Congrats!Horizon 2020 EU iMIRACLIhttps://ipl-uv.github.io/news/2020/horizon-2020-imiracli/Wed, 01 Jan 2020 00:00:00 +0000https://ipl-uv.github.io/news/2020/horizon-2020-imiracli/Starting on January 2020, - Horizon-H2020 EU iMIRACLI Bringing together leading climate and machine learning experts across Europe to train a new next generation of climate data scientists.IEEE Distinguished Lecturerhttps://ipl-uv.github.io/news/2020/ieee-distinguished-lecturer/Wed, 01 Jan 2020 00:00:00 +0000https://ipl-uv.github.io/news/2020/ieee-distinguished-lecturer/Prof. Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).Interview in Naturehttps://ipl-uv.github.io/news/2020/interview-in-nature/Wed, 01 Jan 2020 00:00:00 +0000https://ipl-uv.github.io/news/2020/interview-in-nature/Prof. Gustau Camps-Valls was interviewed about the international research published in Nature, which provides a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks.Member of ELISE Networkhttps://ipl-uv.github.io/news/2020/elise-network-member/Wed, 01 Jan 2020 00:00:00 +0000https://ipl-uv.github.io/news/2020/elise-network-member/ISP Member of the European Network of Artificial Intelligence Excellence Centres ELISE.Mètode TV SEDALhttps://ipl-uv.github.io/news/2020/metode-tv-sedal/Wed, 01 Jan 2020 00:00:00 +0000https://ipl-uv.github.io/news/2020/metode-tv-sedal/Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.Cool Toolboxeshttps://ipl-uv.github.io/news/2019/cool-toolboxes/Tue, 01 Jan 2019 00:00:00 +0000https://ipl-uv.github.io/news/2019/cool-toolboxes/Cool! Impressive progress in our toolboxes - Gaussian processes repository &amp; many more. Give it a try! 
Feedback welcome.Jesús Malo Full Professorshiphttps://ipl-uv.github.io/news/2018/jesus-malo-professorship/Sun, 01 Jul 2018 00:00:00 +0000https://ipl-uv.github.io/news/2018/jesus-malo-professorship/Jesús Malo got the full professorship in Vision Science at the Department of Physics in July 2018.Group Growinghttps://ipl-uv.github.io/news/2018/group-growing/Mon, 01 Jan 2018 00:00:00 +0000https://ipl-uv.github.io/news/2018/group-growing/The group is growing quite a lot! New people with lots of ideas, background and expertises. Welcome Jose Enrique, Jose Juan, Diego, Dan, Emiliano, and Emma! Check their profiles and exciting research here!H-index 60https://ipl-uv.github.io/news/2018/h-index-60/Mon, 01 Jan 2018 00:00:00 +0000https://ipl-uv.github.io/news/2018/h-index-60/We reached an h-index of 60. Follow our research here!IEEE Fellowhttps://ipl-uv.github.io/news/2018/ieee-fellow/Mon, 01 Jan 2018 00:00:00 +0000https://ipl-uv.github.io/news/2018/ieee-fellow/Professor Camps-Valls was elevated to the grade of IEEE Fellow by the Geoscience and Remote Sensing Society, and also recognized by the Signal Processing Society.Jesús Malo Associate Editorhttps://ipl-uv.github.io/news/2017/jesus-malo-editor/Mon, 01 May 2017 00:00:00 +0000https://ipl-uv.github.io/news/2017/jesus-malo-editor/Prof. Jesús Malo has been appointed as Associate Editor of Frontiers in Neuroscience (IF = 3.4), section Perception Science (May 2017).Composite Kernels Classic Paperhttps://ipl-uv.github.io/news/2017/composite-kernels/Sun, 01 Jan 2017 00:00:00 +0000https://ipl-uv.github.io/news/2017/composite-kernels/We are a classic! 
&ldquo;Composite kernels for hyperspectral image classification&rdquo; published in IEEE Geoscience and Remote Sensing Letters in 2006 is one of the highly-cited papers in its area of research, maintaining its impact long after publication.DSP with Kernel Methods Bookhttps://ipl-uv.github.io/news/2017/dsp-kernel-methods/Sun, 01 Jan 2017 00:00:00 +0000https://ipl-uv.github.io/news/2017/dsp-kernel-methods/We are excited about the publication of our new book, entitled &ldquo;Digital Signal Processing with Kernel Methods&rdquo;, by Rojo-Álvarez, J.L. and Martínez-Ramón, M. and Muñoz-Marí, J. and Camps-Valls, G., published by Wiley and sons.Geoscience Committeehttps://ipl-uv.github.io/news/2017/geoscience-committee/Sun, 01 Jan 2017 00:00:00 +0000https://ipl-uv.github.io/news/2017/geoscience-committee/Maria Piles (president) and Gustau Camps-Valls (member) in the Executive Committee of the Spain Geoscience and Remote Sensing Chapter. Let&rsquo;s do things to promote technology and science in remote sensing!Global Carbon Sinks Researchhttps://ipl-uv.github.io/news/2017/global-carbon-sinks/Sun, 01 Jan 2017 00:00:00 +0000https://ipl-uv.github.io/news/2017/global-carbon-sinks/An international research involving the Universitat de València just provided a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks. The results are published in Nature with Gustau Camps-Valls as co-author.Mètode TV SEDALhttps://ipl-uv.github.io/news/2017/metode-tv-sedal-2017/Sun, 01 Jan 2017 00:00:00 +0000https://ipl-uv.github.io/news/2017/metode-tv-sedal-2017/Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.Analog Devices 2016https://ipl-uv.github.io/news/2016/analog-devices-2016/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/analog-devices-2016/Analog Devices moves to the Science Park of the UV. 
New facilities and closer collaborations. Welcome back, Javi!Analog Devices Moveshttps://ipl-uv.github.io/news/2016/analog-devices-moves/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/analog-devices-moves/Analog Devices moves to the Science Park of the UV. New facilities and closer collaborations!Dagstuhl Seminar 2016https://ipl-uv.github.io/news/2016/dagstuhl-seminar-2016/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/dagstuhl-seminar-2016/Dagstuhl seminar on &ldquo;Computer Science meets Ecology&rdquo; gathering the best researchers of both worlds in a unique environment for fruitful discussions.Dagstuhl Seminar 2016https://ipl-uv.github.io/news/2016/dagstuhl-seminar/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/dagstuhl-seminar/We organized a Dagstuhl seminar on &ldquo;Computer Science meets Ecology&rdquo; gathering the best researchers of both worlds in a unique environment for fruitful discussions.ERC Consolidator Granthttps://ipl-uv.github.io/news/2016/erc-consolidator-grant/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/erc-consolidator-grant/Gustau Camps-Valls recently got an ERC consolidator grant (ERC-CoG) with the project &ldquo;Statistical Learning for Remote Sensing Data Analysis&rdquo; (SEDAL) for the period 2015-2020.Google Award 2015https://ipl-uv.github.io/news/2016/google-award-2015/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/google-award-2015/Luis Gómez-Chova received a Google Earth Engine Research Award 2015 to foster research in cloud detection.Google Earth Engine Awardhttps://ipl-uv.github.io/news/2016/google-earth-engine-award/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/google-earth-engine-award/Luis Gómez-Chova received a Google Earth Engine Research Award 2015 to foster research in cloud detection.GRSM Special Issuehttps://ipl-uv.github.io/news/2016/grsm-special-issue/Fri, 01 Jan 2016 00:00:00 
+0000https://ipl-uv.github.io/news/2016/grsm-special-issue/Gustau Camps-Valls co-edits a Special Issue on the IEEE Geoscience and Remote Sensing Magazine (GRSM).Hyperspectral Imaging Special Issuehttps://ipl-uv.github.io/news/2016/hyperspectral-imaging/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/hyperspectral-imaging/Gustau Camps-Valls co-edits a Special Issue titled &lsquo;Hyperspectral Imaging and Image Processing&rsquo; for the journal Sensing and Imaging (Springer).IEEE Distinguished Lecturer 2016https://ipl-uv.github.io/news/2016/ieee-lecturer-2016/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/ieee-lecturer-2016/Prof. Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).IEEE GRSS Data Fusion Contesthttps://ipl-uv.github.io/news/2016/data-fusion-contest/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/data-fusion-contest/We won the 2015 IEEE GRSS Data Fusion Contest with a paper on the statistical analysis of the optical and LiDAR complementary information.IGARSS 2018 Organizationhttps://ipl-uv.github.io/news/2016/igarss-2018-organization/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/igarss-2018-organization/We organized here in València the biggest IEEE remote sensing and geoscience conference, IGARSS, in 2018.Jesús Malo Academic Editorhttps://ipl-uv.github.io/news/2016/academic-editor-plos/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/academic-editor-plos/Jesús Malo has been appointed as Academic Editor at PLoS ONE for the period 2014-2017.Learning Material Publicationhttps://ipl-uv.github.io/news/2016/learning-material/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/learning-material/We published learning material on geoscience and remote sensing data processing in IEEE GRSS training material and many more online available!Maria Piles Ramón y Cajal 
Fellowshiphttps://ipl-uv.github.io/news/2016/maria-piles-fellowship/Fri, 01 Jan 2016 00:00:00 +0000https://ipl-uv.github.io/news/2016/maria-piles-fellowship/Dr. Maria Piles joins the ISP group through a prestigious &ldquo;Ramón y Cajal&rdquo; fellowship next January 2017. The ISP consolidates the research line of retrieval of soil moisture and vegetation biogeophysical parameters from space observations (microwave radiometers, radars and hyperspectral imagers). Welcome Maria! \ No newline at end of file +News on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/news/Recent content in News on ISP - Image and Signal Processing groupHugoen-usSat, 01 Jul 2023 00:00:00 +0000Conferencias sobre IA en Fundación Cañada Blanchhttps://isp.uv.es/github/news/2023/fundacion-canada-blanch/Sat, 01 Jul 2023 00:00:00 +0000https://isp.uv.es/github/news/2023/fundacion-canada-blanch/Gustau Camps-Valls, reconocido internacionalmente por sus contribuciones en Inteligencia Artificial, aprendizaje automático y su aplicación en las ciencias de la tierra y el clima, inicia un ciclo de conferencias sobre IA en la Fundación Cañada Blanch.ELLIS Meeting in Valenciahttps://isp.uv.es/github/news/2023/ellis-meeting/Thu, 01 Jun 2023 00:00:00 +0000https://isp.uv.es/github/news/2023/ellis-meeting/The UV and the ISP gathers in València (Spain) world-renowned experts in two interconnected fields: climate change and artificial intelligence. The meeting is organized by the ELLIS Program ‘Machine Learning for Earth and Climate Sciences’ and the ELISE project, which connect outstanding researchers in these fields across Europe. 
The discussions will focus on detecting, analyzing, and understanding extreme weather events, such as droughts, heat waves, floods, fires, and hurricanes, and applying artificial intelligence in this context.ISP organiza reuniones sobre fenómenos meteorológicos extremoshttps://isp.uv.es/github/news/2023/isp-reuniones/Mon, 01 May 2023 00:00:00 +0000https://isp.uv.es/github/news/2023/isp-reuniones/El ISP organiza dos reuniones que reúnen expertos en fenómenos meteorológicos extremos y en la aplicación de algoritmos en València. Las reuniones estudian estrategias para la detección temprana, dentro de acciones en los proyectos europeos H2020 XAIDA y del programa de investigación en ELLIS &ldquo;Machine Learning for Earth and Climate Science&rdquo;, coordinados por Gustau Camps-Valls.Finalists for ESA-EGU 2023https://isp.uv.es/github/news/2023/esa-egu-2023/Sat, 01 Apr 2023 00:00:00 +0000https://isp.uv.es/github/news/2023/esa-egu-2023/The group led by Gustau Camps-Valls from the University of Valencia in Spain is finalist for the ESA-EGU 2023 team award, for their work on the development of novel Artificial Intelligence methods to analyse Earth observation data, with the goal of modelling and understanding the complex interactions between the various components of the Earth system.AI4CS - GVA PROMETEO 2022-2025https://isp.uv.es/github/news/2023/ai4cs-prometeo/Wed, 01 Mar 2023 00:00:00 +0000https://isp.uv.es/github/news/2023/ai4cs-prometeo/AI4CS - GVA PROMETEO 2022-2025 AI for complex systems: Brain, Earth, Climate, Society. We are hosting the first project meeting in person! The aim is to know each other better, find common points of interest and organize activities e.g. workshops, PhD students exchange, research designs, etc. 
Presentations by Marcelo Bertalmío from CSIC, Luca Martino from URJC, Sancho Salcedo from UAH, Adriano Camps from UPC, Luis Guanter from UPV, and Gustau Camps-Valls, followed by discussions about limitations on the standard model of vision science, imaging and AI, model and feature selection trade-offs, extreme event predictions and diverse AI-relevant Remote Sensing case studies.AI for Earth and Sustainability Sciencehttps://isp.uv.es/github/news/2023/ai-for-earth/Wed, 01 Feb 2023 00:00:00 +0000https://isp.uv.es/github/news/2023/ai-for-earth/Gustau Camps-Valls, Markus Reichstein, Joachim Denzler y Maria Piles coordinan el ciclo &ldquo;AI for Earth and Sustainability Science&rdquo; dentro de las acciones AI for Good de la ITU.En un artículo en Nature Communicationshttps://isp.uv.es/github/news/2023/nature-communications-ia/Sun, 01 Jan 2023 00:00:00 +0000https://isp.uv.es/github/news/2023/nature-communications-ia/En un artículo en Nature Communications demostramos que la IA descubre que el nivel socioeconómico explica los movimientos poblacionales que se dan tras catástrofes generadas por fenómenos naturales extremos como inundaciones, vendavales y deslizamientos de tierra.Highly Cited Researcher 2022https://isp.uv.es/github/news/2022/highly-cited-researcher/Sun, 01 May 2022 00:00:00 +0000https://isp.uv.es/github/news/2022/highly-cited-researcher/Gustau Camps-Valls is a Highly Cited Researcher in 2022 again!Artículo sobre las limitaciones de la IAhttps://isp.uv.es/github/news/2022/ia-limitations/Fri, 01 Apr 2022 00:00:00 +0000https://isp.uv.es/github/news/2022/ia-limitations/El prof. 
Camps-Valls publica un artículo advirtiendo de las limitaciones de la IA actual, y aboga por incorporar conocimiento del dominio y las leyes de la Física, realizar un mayor esfuerzo en la explicabilidad de los modelos, y en la inferencia causal.Participation in ELISE projecthttps://isp.uv.es/github/news/2022/elise-project/Tue, 01 Mar 2022 00:00:00 +0000https://isp.uv.es/github/news/2022/elise-project/The ISP participates in the ELISE project actively, and contributes to the ELISE vision for the next generation of AI for Europe. In particular, on ELISE’s Strategic Research Agenda and trends in AI!AI4Science DeepExtremes Project Kicked-offhttps://isp.uv.es/github/news/2022/ai4science-deepextremes/Tue, 01 Feb 2022 00:00:00 +0000https://isp.uv.es/github/news/2022/ai4science-deepextremes/ESA 2022-23 AI4Science DeepExtremes Project Kicked-off! Multi-Hazards, Compounds and Cascade events. We will rely on deep learning to deal with spatio-temporal data, techniques from computer vision for forecasting impacts, and the advanced regression methods for associating impacts on biosphere and society. Understanding what the DL models have learned are of importance here: explainable AI techniques and methods from modern Bayesian inference. Follow us on Twitter DeepExtremes LeipzigValenciaJenaHamburg.Nuevo miembro de la Academia Europea de Cienciashttps://isp.uv.es/github/news/2022/nuevo-miembro-academia/Sat, 01 Jan 2022 00:00:00 +0000https://isp.uv.es/github/news/2022/nuevo-miembro-academia/El prof. Camps-Valls es un nuevo miembro de la Academia Europea de las Ciencias, y de la Academia Europaea.Horizon 2020 EU XAIDA Kick-offhttps://isp.uv.es/github/news/2021/horizon-2020-xaida/Wed, 01 Sep 2021 00:00:00 +0000https://isp.uv.es/github/news/2021/horizon-2020-xaida/Kick-off September! Horizon 2020 EU XAIDA Extreme Events: Artificial Intelligence for detection and attribution- Climate Change studies. 
Stay tuned!ISP-IPL Growing Uphttps://isp.uv.es/github/news/2021/isp-ipl-growing-up/Wed, 01 Sep 2021 00:00:00 +0000https://isp.uv.es/github/news/2021/isp-ipl-growing-up/ISP-IPL growing-up New people and projects. Welcome! Exciting research and synergies going on! Check out our collaborators and projects sections -ERC USMILE, H2020 DeepCube, H2020 iMIRACLI, Deepcloud, Leaves, Maloc&hellip; we explore the domians of land, atmosphere, ocean, computer vision and more!Worldfloods Detection Modelhttps://isp.uv.es/github/news/2021/worldfloods-detection/Wed, 30 Jun 2021 00:00:00 +0000https://isp.uv.es/github/news/2021/worldfloods-detection/&ldquo;Worldfloods&rdquo; detection model onboard SpaceX’s Falcon rocket launched June 30th! Our script is now flying on a hard-disk at 20,000 km from the Earth! Successfully developed by ISP &amp; Oxford University in partnership with ESA Φ-lab! Who made it possible? Gonzalo Mateo-Garcia, Joshua Veitch-Michaelis, Lewis Smith, Silviu Oprea, Guy Schumann, Yarin Gal, Atılım Güneş Baydin, and Dietmar Backes. Congrats!AI for Sea Level Predictionshttps://isp.uv.es/github/news/2021/ai-oceans-sea-level/Thu, 15 Apr 2021 00:00:00 +0000https://isp.uv.es/github/news/2021/ai-oceans-sea-level/La inteligencia artificial llega a los océanos para desarrollar predicciones sobre el nivel del mar.Highly Cited Researcher 2021https://isp.uv.es/github/news/2021/highly-cited-researcher-2021/Fri, 01 Jan 2021 00:00:00 +0000https://isp.uv.es/github/news/2021/highly-cited-researcher-2021/Gustau Camps-Valls was named a Highly Cited Researcher 2021 by Clarivate for Academia and Government WoS in recognition of his contribution to AI for the geosciences and scientific enthusiasm! We are all happy of being part of this great team at ISP, participating in inspiring research projects and promoting international collaboration networks, Gustau. 
Congrats!Horizon 2020 EU iMIRACLIhttps://isp.uv.es/github/news/2020/horizon-2020-imiracli/Wed, 01 Jan 2020 00:00:00 +0000https://isp.uv.es/github/news/2020/horizon-2020-imiracli/Starting on January 2020, - Horizon-H2020 EU iMIRACLI Bringing together leading climate and machine learning experts across Europe to train a new next generation of climate data scientists.IEEE Distinguished Lecturerhttps://isp.uv.es/github/news/2020/ieee-distinguished-lecturer/Wed, 01 Jan 2020 00:00:00 +0000https://isp.uv.es/github/news/2020/ieee-distinguished-lecturer/Prof. Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).Interview in Naturehttps://isp.uv.es/github/news/2020/interview-in-nature/Wed, 01 Jan 2020 00:00:00 +0000https://isp.uv.es/github/news/2020/interview-in-nature/Prof. Gustau Camps-Valls was interviewed about the international research published in Nature, which provides a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks.Member of ELISE Networkhttps://isp.uv.es/github/news/2020/elise-network-member/Wed, 01 Jan 2020 00:00:00 +0000https://isp.uv.es/github/news/2020/elise-network-member/ISP Member of the European Network of Artificial Intelligence Excellence Centres ELISE.Mètode TV SEDALhttps://isp.uv.es/github/news/2020/metode-tv-sedal/Wed, 01 Jan 2020 00:00:00 +0000https://isp.uv.es/github/news/2020/metode-tv-sedal/Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.Cool Toolboxeshttps://isp.uv.es/github/news/2019/cool-toolboxes/Tue, 01 Jan 2019 00:00:00 +0000https://isp.uv.es/github/news/2019/cool-toolboxes/Cool! Impressive progress in our toolboxes - Gaussian processes repository &amp; many more. Give it a try! 
Feedback welcome.Jesús Malo Full Professorshiphttps://isp.uv.es/github/news/2018/jesus-malo-professorship/Sun, 01 Jul 2018 00:00:00 +0000https://isp.uv.es/github/news/2018/jesus-malo-professorship/Jesús Malo got the full professorship in Vision Science at the Department of Physics in July 2018.Group Growinghttps://isp.uv.es/github/news/2018/group-growing/Mon, 01 Jan 2018 00:00:00 +0000https://isp.uv.es/github/news/2018/group-growing/The group is growing quite a lot! New people with lots of ideas, background and expertises. Welcome Jose Enrique, Jose Juan, Diego, Dan, Emiliano, and Emma! Check their profiles and exciting research here!H-index 60https://isp.uv.es/github/news/2018/h-index-60/Mon, 01 Jan 2018 00:00:00 +0000https://isp.uv.es/github/news/2018/h-index-60/We reached an h-index of 60. Follow our research here!IEEE Fellowhttps://isp.uv.es/github/news/2018/ieee-fellow/Mon, 01 Jan 2018 00:00:00 +0000https://isp.uv.es/github/news/2018/ieee-fellow/Professor Camps-Valls was elevated to the grade of IEEE Fellow by the Geoscience and Remote Sensing Society, and also recognized by the Signal Processing Society.Jesús Malo Associate Editorhttps://isp.uv.es/github/news/2017/jesus-malo-editor/Mon, 01 May 2017 00:00:00 +0000https://isp.uv.es/github/news/2017/jesus-malo-editor/Prof. Jesús Malo has been appointed as Associate Editor of Frontiers in Neuroscience (IF = 3.4), section Perception Science (May 2017).Composite Kernels Classic Paperhttps://isp.uv.es/github/news/2017/composite-kernels/Sun, 01 Jan 2017 00:00:00 +0000https://isp.uv.es/github/news/2017/composite-kernels/We are a classic! 
&ldquo;Composite kernels for hyperspectral image classification&rdquo; published in IEEE Geoscience and Remote Sensing Letters in 2006 is one of the highly-cited papers in its area of research, maintaining its impact long after publication.DSP with Kernel Methods Bookhttps://isp.uv.es/github/news/2017/dsp-kernel-methods/Sun, 01 Jan 2017 00:00:00 +0000https://isp.uv.es/github/news/2017/dsp-kernel-methods/We are excited about the publication of our new book, entitled &ldquo;Digital Signal Processing with Kernel Methods&rdquo;, by Rojo-Álvarez, J.L. and Martínez-Ramón, M. and Muñoz-Marí, J. and Camps-Valls, G., published by Wiley and sons.Geoscience Committeehttps://isp.uv.es/github/news/2017/geoscience-committee/Sun, 01 Jan 2017 00:00:00 +0000https://isp.uv.es/github/news/2017/geoscience-committee/Maria Piles (president) and Gustau Camps-Valls (member) in the Executive Committee of the Spain Geoscience and Remote Sensing Chapter. Let&rsquo;s do things to promote technology and science in remote sensing!Global Carbon Sinks Researchhttps://isp.uv.es/github/news/2017/global-carbon-sinks/Sun, 01 Jan 2017 00:00:00 +0000https://isp.uv.es/github/news/2017/global-carbon-sinks/An international research involving the Universitat de València just provided a new vision in the quest to determine whether temperature or water availability is the most influential factor in determining the success of global, land-based carbon sinks. The results are published in Nature with Gustau Camps-Valls as co-author.Mètode TV SEDALhttps://isp.uv.es/github/news/2017/metode-tv-sedal-2017/Sun, 01 Jan 2017 00:00:00 +0000https://isp.uv.es/github/news/2017/metode-tv-sedal-2017/Mètode TV SEDAL: From MètodeTV, the channel of the Universitat de València, SEDAL is introduced to the public.Analog Devices 2016https://isp.uv.es/github/news/2016/analog-devices-2016/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/analog-devices-2016/Analog Devices moves to the Science Park of the UV. 
New facilities and closer collaborations. Welcome back, Javi!Analog Devices Moveshttps://isp.uv.es/github/news/2016/analog-devices-moves/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/analog-devices-moves/Analog Devices moves to the Science Park of the UV. New facilities and closer collaborations!Dagstuhl Seminar 2016https://isp.uv.es/github/news/2016/dagstuhl-seminar-2016/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/dagstuhl-seminar-2016/Dagstuhl seminar on &ldquo;Computer Science meets Ecology&rdquo; gathering the best researchers of both worlds in a unique environment for fruitful discussions.Dagstuhl Seminar 2016https://isp.uv.es/github/news/2016/dagstuhl-seminar/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/dagstuhl-seminar/We organized a Dagstuhl seminar on &ldquo;Computer Science meets Ecology&rdquo; gathering the best researchers of both worlds in a unique environment for fruitful discussions.ERC Consolidator Granthttps://isp.uv.es/github/news/2016/erc-consolidator-grant/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/erc-consolidator-grant/Gustau Camps-Valls recently got an ERC consolidator grant (ERC-CoG) with the project &ldquo;Statistical Learning for Remote Sensing Data Analysis&rdquo; (SEDAL) for the period 2015-2020.Google Award 2015https://isp.uv.es/github/news/2016/google-award-2015/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/google-award-2015/Luis Gómez-Chova received a Google Earth Engine Research Award 2015 to foster research in cloud detection.Google Earth Engine Awardhttps://isp.uv.es/github/news/2016/google-earth-engine-award/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/google-earth-engine-award/Luis Gómez-Chova received a Google Earth Engine Research Award 2015 to foster research in cloud detection.GRSM Special Issuehttps://isp.uv.es/github/news/2016/grsm-special-issue/Fri, 01 Jan 2016 00:00:00 
+0000https://isp.uv.es/github/news/2016/grsm-special-issue/Gustau Camps-Valls co-edits a Special Issue on the IEEE Geoscience and Remote Sensing Magazine (GRSM).Hyperspectral Imaging Special Issuehttps://isp.uv.es/github/news/2016/hyperspectral-imaging/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/hyperspectral-imaging/Gustau Camps-Valls co-edits a Special Issue titled &lsquo;Hyperspectral Imaging and Image Processing&rsquo; for the journal Sensing and Imaging (Springer).IEEE Distinguished Lecturer 2016https://isp.uv.es/github/news/2016/ieee-lecturer-2016/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/ieee-lecturer-2016/Prof. Gustau Camps-Valls was appointed as IEEE Distinguished Lecturer of the Geoscience and Remote Sensing Society (GRSS).IEEE GRSS Data Fusion Contesthttps://isp.uv.es/github/news/2016/data-fusion-contest/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/data-fusion-contest/We won the 2015 IEEE GRSS Data Fusion Contest with a paper on the statistical analysis of the optical and LiDAR complementary information.IGARSS 2018 Organizationhttps://isp.uv.es/github/news/2016/igarss-2018-organization/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/igarss-2018-organization/We organized here in València the biggest IEEE remote sensing and geoscience conference, IGARSS, in 2018.Jesús Malo Academic Editorhttps://isp.uv.es/github/news/2016/academic-editor-plos/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/academic-editor-plos/Jesús Malo has been appointed as Academic Editor at PLoS ONE for the period 2014-2017.Learning Material Publicationhttps://isp.uv.es/github/news/2016/learning-material/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/learning-material/We published learning material on geoscience and remote sensing data processing in IEEE GRSS training material and many more online available!Maria Piles Ramón y Cajal 
Fellowshiphttps://isp.uv.es/github/news/2016/maria-piles-fellowship/Fri, 01 Jan 2016 00:00:00 +0000https://isp.uv.es/github/news/2016/maria-piles-fellowship/Dr. Maria Piles joins the ISP group through a prestigious &ldquo;Ramón y Cajal&rdquo; fellowship next January 2017. The ISP consolidates the research line of retrieval of soil moisture and vegetation biogeophysical parameters from space observations (microwave radiometers, radars and hyperspectral imagers). Welcome Maria! \ No newline at end of file diff --git a/people/alumni/amparo_gil/index.html b/people/alumni/amparo_gil/index.html index d64a53cb..ba996006 100644 --- a/people/alumni/amparo_gil/index.html +++ b/people/alumni/amparo_gil/index.html @@ -2,4 +2,4 @@

Amparo Gil

\ No newline at end of file +

Amparo Gil

\ No newline at end of file diff --git a/people/alumni/anna_mateo/index.html b/people/alumni/anna_mateo/index.html index eceb69e0..fe44a66d 100644 --- a/people/alumni/anna_mateo/index.html +++ b/people/alumni/anna_mateo/index.html @@ -2,4 +2,4 @@

Anna Maria Mateo

\ No newline at end of file +

Anna Maria Mateo

\ No newline at end of file diff --git a/people/alumni/benyamin_kheradvar/index.html b/people/alumni/benyamin_kheradvar/index.html index 43764405..16d513d3 100644 --- a/people/alumni/benyamin_kheradvar/index.html +++ b/people/alumni/benyamin_kheradvar/index.html @@ -2,4 +2,4 @@

Benyamin Kheradvar

\ No newline at end of file +

Benyamin Kheradvar

\ No newline at end of file diff --git a/people/alumni/borja_galan/index.html b/people/alumni/borja_galan/index.html index 26610a86..9747db27 100644 --- a/people/alumni/borja_galan/index.html +++ b/people/alumni/borja_galan/index.html @@ -2,4 +2,4 @@

Borja Galán

\ No newline at end of file +

Borja Galán

\ No newline at end of file diff --git a/people/alumni/daniel_heestermans/index.html b/people/alumni/daniel_heestermans/index.html index 659beadd..7d2d6572 100644 --- a/people/alumni/daniel_heestermans/index.html +++ b/people/alumni/daniel_heestermans/index.html @@ -2,4 +2,4 @@

Daniel Heestermans Svendsen

\ No newline at end of file +

Daniel Heestermans Svendsen

\ No newline at end of file diff --git a/people/alumni/devis_tuia/index.html b/people/alumni/devis_tuia/index.html index d18dcccb..184fbbd2 100644 --- a/people/alumni/devis_tuia/index.html +++ b/people/alumni/devis_tuia/index.html @@ -2,4 +2,4 @@

Devis Tuia

\ No newline at end of file +

Devis Tuia

\ No newline at end of file diff --git a/people/alumni/diego_bueso/index.html b/people/alumni/diego_bueso/index.html index da7e1121..ad4835fa 100644 --- a/people/alumni/diego_bueso/index.html +++ b/people/alumni/diego_bueso/index.html @@ -2,4 +2,4 @@

Diego Bueso

\ No newline at end of file +

Diego Bueso

\ No newline at end of file diff --git a/people/alumni/emma_izquierdo/index.html b/people/alumni/emma_izquierdo/index.html index 485a8354..ce3bfa2d 100644 --- a/people/alumni/emma_izquierdo/index.html +++ b/people/alumni/emma_izquierdo/index.html @@ -2,4 +2,4 @@

Emma Izquierdo

\ No newline at end of file +

Emma Izquierdo

\ No newline at end of file diff --git a/people/alumni/emmanuel_johnson/index.html b/people/alumni/emmanuel_johnson/index.html index 824adaeb..3a770ce1 100644 --- a/people/alumni/emmanuel_johnson/index.html +++ b/people/alumni/emmanuel_johnson/index.html @@ -2,4 +2,4 @@

J. Emmanuel Johnson

\ No newline at end of file +

J. Emmanuel Johnson

\ No newline at end of file diff --git a/people/alumni/fatih_nar/index.html b/people/alumni/fatih_nar/index.html index 93ae3a62..a6030673 100644 --- a/people/alumni/fatih_nar/index.html +++ b/people/alumni/fatih_nar/index.html @@ -2,4 +2,4 @@

Fatih Nar

\ No newline at end of file +

Fatih Nar

\ No newline at end of file diff --git a/people/alumni/francesca_bovolo/index.html b/people/alumni/francesca_bovolo/index.html index 80992a92..bcdcbde3 100644 --- a/people/alumni/francesca_bovolo/index.html +++ b/people/alumni/francesca_bovolo/index.html @@ -2,4 +2,4 @@

Francesca Bovolo

\ No newline at end of file +

Francesca Bovolo

\ No newline at end of file diff --git a/people/alumni/gabriel_gomez/index.html b/people/alumni/gabriel_gomez/index.html index 18853564..1e2461aa 100644 --- a/people/alumni/gabriel_gomez/index.html +++ b/people/alumni/gabriel_gomez/index.html @@ -2,4 +2,4 @@

Gabriel Gómez

\ No newline at end of file +

Gabriel Gómez

\ No newline at end of file diff --git a/people/alumni/helena_burriel/index.html b/people/alumni/helena_burriel/index.html index a90e7e38..1eeb38aa 100644 --- a/people/alumni/helena_burriel/index.html +++ b/people/alumni/helena_burriel/index.html @@ -2,4 +2,4 @@

Helena Burriel

\ No newline at end of file +

Helena Burriel

\ No newline at end of file diff --git a/people/alumni/irene_epifanio/index.html b/people/alumni/irene_epifanio/index.html index b66c5681..1438b28c 100644 --- a/people/alumni/irene_epifanio/index.html +++ b/people/alumni/irene_epifanio/index.html @@ -2,4 +2,4 @@

Irene Epifanio

\ No newline at end of file +

Irene Epifanio

\ No newline at end of file diff --git a/people/alumni/irene_martin/index.html b/people/alumni/irene_martin/index.html index 22a0e9ec..720b32d4 100644 --- a/people/alumni/irene_martin/index.html +++ b/people/alumni/irene_martin/index.html @@ -2,4 +2,4 @@

Irene Martin

\ No newline at end of file +

Irene Martin

\ No newline at end of file diff --git a/people/alumni/jose_padron/index.html b/people/alumni/jose_padron/index.html index a9c05768..b1b45f88 100644 --- a/people/alumni/jose_padron/index.html +++ b/people/alumni/jose_padron/index.html @@ -2,4 +2,4 @@

Jose Antonio Padrón

\ No newline at end of file +

Jose Antonio Padrón

\ No newline at end of file diff --git a/people/alumni/jose_rovira/index.html b/people/alumni/jose_rovira/index.html index e942b949..5a75d5a3 100644 --- a/people/alumni/jose_rovira/index.html +++ b/people/alumni/jose_rovira/index.html @@ -2,4 +2,4 @@

Jose Rovira

\ No newline at end of file +

Jose Rovira

\ No newline at end of file diff --git a/people/alumni/juan_gutierrez/index.html b/people/alumni/juan_gutierrez/index.html index 2364bd13..5793a2b4 100644 --- a/people/alumni/juan_gutierrez/index.html +++ b/people/alumni/juan_gutierrez/index.html @@ -2,4 +2,4 @@

Juan Gutiérrez

\ No newline at end of file +

Juan Gutiérrez

\ No newline at end of file diff --git a/people/alumni/koray/index.html b/people/alumni/koray/index.html index 4c1441d4..5560f5d3 100644 --- a/people/alumni/koray/index.html +++ b/people/alumni/koray/index.html @@ -2,4 +2,4 @@

Koray Çiftçi

\ No newline at end of file +

Koray Çiftçi

\ No newline at end of file diff --git a/people/alumni/luca_capobianco/index.html b/people/alumni/luca_capobianco/index.html index 78279a8a..8ddaa6f7 100644 --- a/people/alumni/luca_capobianco/index.html +++ b/people/alumni/luca_capobianco/index.html @@ -2,4 +2,4 @@

Luca Capobianco

\ No newline at end of file +

Luca Capobianco

\ No newline at end of file diff --git a/people/alumni/luca_martino/index.html b/people/alumni/luca_martino/index.html index 4bcb499b..3a3141d7 100644 --- a/people/alumni/luca_martino/index.html +++ b/people/alumni/luca_martino/index.html @@ -2,4 +2,4 @@

Luca Martino

\ No newline at end of file +

Luca Martino

\ No newline at end of file diff --git a/people/alumni/manuel_taberner/index.html b/people/alumni/manuel_taberner/index.html index 2eff2b23..1859d6db 100644 --- a/people/alumni/manuel_taberner/index.html +++ b/people/alumni/manuel_taberner/index.html @@ -2,4 +2,4 @@

Manuel Campos-Taberner

\ No newline at end of file +

Manuel Campos-Taberner

\ No newline at end of file diff --git a/people/alumni/mara_diez/index.html b/people/alumni/mara_diez/index.html index bd1859dc..260b272d 100644 --- a/people/alumni/mara_diez/index.html +++ b/people/alumni/mara_diez/index.html @@ -2,4 +2,4 @@

Mara Díez

\ No newline at end of file +

Mara Díez

\ No newline at end of file diff --git a/people/alumni/marcelo_armengot/index.html b/people/alumni/marcelo_armengot/index.html index 4569879f..b75f9918 100644 --- a/people/alumni/marcelo_armengot/index.html +++ b/people/alumni/marcelo_armengot/index.html @@ -2,4 +2,4 @@

Marcelo Armengot

\ No newline at end of file +

Marcelo Armengot

\ No newline at end of file diff --git a/people/alumni/marina_garcia/index.html b/people/alumni/marina_garcia/index.html index 54a01820..c5833d4f 100644 --- a/people/alumni/marina_garcia/index.html +++ b/people/alumni/marina_garcia/index.html @@ -2,4 +2,4 @@

Marina Martínez-García

\ No newline at end of file +

Marina Martínez-García

\ No newline at end of file diff --git a/people/alumni/mattia_marconcini/index.html b/people/alumni/mattia_marconcini/index.html index 2db95b53..4db57fa5 100644 --- a/people/alumni/mattia_marconcini/index.html +++ b/people/alumni/mattia_marconcini/index.html @@ -2,4 +2,4 @@

Mattia Marconcini

\ No newline at end of file +

Mattia Marconcini

\ No newline at end of file diff --git a/people/alumni/michele_ronco/index.html b/people/alumni/michele_ronco/index.html index 3c5c737d..bb30adb1 100644 --- a/people/alumni/michele_ronco/index.html +++ b/people/alumni/michele_ronco/index.html @@ -2,4 +2,4 @@

Michele Ronco

\ No newline at end of file +

Michele Ronco

\ No newline at end of file diff --git a/people/alumni/qiang_li/index.html b/people/alumni/qiang_li/index.html index 180054e7..50f8e191 100644 --- a/people/alumni/qiang_li/index.html +++ b/people/alumni/qiang_li/index.html @@ -2,4 +2,4 @@

Qiang Li

\ No newline at end of file +

Qiang Li

\ No newline at end of file diff --git a/people/alumni/qiang_wang/index.html b/people/alumni/qiang_wang/index.html index 83405050..4e4f2810 100644 --- a/people/alumni/qiang_wang/index.html +++ b/people/alumni/qiang_wang/index.html @@ -2,4 +2,4 @@

Qiang Wang

\ No newline at end of file +

Qiang Wang

\ No newline at end of file diff --git a/people/alumni/raul_santos/index.html b/people/alumni/raul_santos/index.html index e7f25bcd..42358462 100644 --- a/people/alumni/raul_santos/index.html +++ b/people/alumni/raul_santos/index.html @@ -2,4 +2,4 @@

Raul Santos-Rodríguez

\ No newline at end of file +

Raul Santos-Rodríguez

\ No newline at end of file diff --git a/people/alumni/sal_catsis/index.html b/people/alumni/sal_catsis/index.html index 134884a2..7cf177fa 100644 --- a/people/alumni/sal_catsis/index.html +++ b/people/alumni/sal_catsis/index.html @@ -2,4 +2,4 @@

Sal Catsis

\ No newline at end of file +

Sal Catsis

\ No newline at end of file diff --git a/people/alumni/sandra_jimenez/index.html b/people/alumni/sandra_jimenez/index.html index d3aaed76..f2191007 100644 --- a/people/alumni/sandra_jimenez/index.html +++ b/people/alumni/sandra_jimenez/index.html @@ -2,4 +2,4 @@

Sandra Jiménez

\ No newline at end of file +

Sandra Jiménez

\ No newline at end of file diff --git a/people/alumni/soulivanh/index.html b/people/alumni/soulivanh/index.html index 5b8ceb74..2fe9f89f 100644 --- a/people/alumni/soulivanh/index.html +++ b/people/alumni/soulivanh/index.html @@ -2,4 +2,4 @@

Soulivanh Thao

\ No newline at end of file +

Soulivanh Thao

\ No newline at end of file diff --git a/people/alumni/vicent_talens/index.html b/people/alumni/vicent_talens/index.html index c997f889..838c0462 100644 --- a/people/alumni/vicent_talens/index.html +++ b/people/alumni/vicent_talens/index.html @@ -2,4 +2,4 @@

Vicent Talens

\ No newline at end of file +

Vicent Talens

\ No newline at end of file diff --git a/people/alumni/yolanda_navarro/index.html b/people/alumni/yolanda_navarro/index.html index 72bae315..870795cf 100644 --- a/people/alumni/yolanda_navarro/index.html +++ b/people/alumni/yolanda_navarro/index.html @@ -2,4 +2,4 @@

Yolanda Navarro

\ No newline at end of file +

Yolanda Navarro

\ No newline at end of file diff --git a/people/assistant_professors/adrian_perez_suay/index.html b/people/assistant_professors/adrian_perez_suay/index.html index 66b075e3..afcdeb39 100644 --- a/people/assistant_professors/adrian_perez_suay/index.html +++ b/people/assistant_professors/adrian_perez_suay/index.html @@ -2,4 +2,4 @@

Adrián Pérez-Suay

\ No newline at end of file +

Adrián Pérez-Suay

\ No newline at end of file diff --git a/people/assistant_professors/ana_ruescas/index.html b/people/assistant_professors/ana_ruescas/index.html index 947e4b60..d2b232e2 100644 --- a/people/assistant_professors/ana_ruescas/index.html +++ b/people/assistant_professors/ana_ruescas/index.html @@ -2,4 +2,4 @@

Ana B. Ruescas

\ No newline at end of file +

Ana B. Ruescas

\ No newline at end of file diff --git a/people/assistant_professors/jose_adsuara/index.html b/people/assistant_professors/jose_adsuara/index.html index e7553d3e..4ea248d5 100644 --- a/people/assistant_professors/jose_adsuara/index.html +++ b/people/assistant_professors/jose_adsuara/index.html @@ -2,4 +2,4 @@

José Enrique Adsuara

\ No newline at end of file +

José Enrique Adsuara

\ No newline at end of file diff --git a/people/assistant_professors/jose_esteve/index.html b/people/assistant_professors/jose_esteve/index.html index 06cf0fc5..9ca159ff 100644 --- a/people/assistant_professors/jose_esteve/index.html +++ b/people/assistant_professors/jose_esteve/index.html @@ -2,4 +2,4 @@

José J. Esteve-Taboada

\ No newline at end of file +

José J. Esteve-Taboada

\ No newline at end of file diff --git a/people/assistant_professors/julia_amoros/index.html b/people/assistant_professors/julia_amoros/index.html index 3267b957..c2f525af 100644 --- a/people/assistant_professors/julia_amoros/index.html +++ b/people/assistant_professors/julia_amoros/index.html @@ -2,4 +2,4 @@

Julia Amorós

\ No newline at end of file +

Julia Amorós

\ No newline at end of file diff --git a/people/assistant_professors/maria_piles/index.html b/people/assistant_professors/maria_piles/index.html index 97935d5e..7a2d39b2 100644 --- a/people/assistant_professors/maria_piles/index.html +++ b/people/assistant_professors/maria_piles/index.html @@ -2,4 +2,4 @@

Maria Piles

\ No newline at end of file +

Maria Piles

\ No newline at end of file diff --git a/people/assistant_professors/roberto_fernandez/index.html b/people/assistant_professors/roberto_fernandez/index.html index e039766b..73007d22 100644 --- a/people/assistant_professors/roberto_fernandez/index.html +++ b/people/assistant_professors/roberto_fernandez/index.html @@ -2,4 +2,4 @@

Roberto Fernandez-Moran

\ No newline at end of file +

Roberto Fernandez-Moran

\ No newline at end of file diff --git a/people/assistant_professors/valero_laparra/index.html b/people/assistant_professors/valero_laparra/index.html index 7f4b0f34..0c312aad 100644 --- a/people/assistant_professors/valero_laparra/index.html +++ b/people/assistant_professors/valero_laparra/index.html @@ -2,4 +2,4 @@

Valero Laparra

\ No newline at end of file +

Valero Laparra

\ No newline at end of file diff --git a/people/associate_professors/javier_calpe/index.html b/people/associate_professors/javier_calpe/index.html index ca4a5a29..cc7cefd6 100644 --- a/people/associate_professors/javier_calpe/index.html +++ b/people/associate_professors/javier_calpe/index.html @@ -2,4 +2,4 @@

Javier Calpe-Maravilla

\ No newline at end of file +

Javier Calpe-Maravilla

\ No newline at end of file diff --git a/people/associate_professors/jordi_munoz/index.html b/people/associate_professors/jordi_munoz/index.html index 764d1837..27ba5472 100644 --- a/people/associate_professors/jordi_munoz/index.html +++ b/people/associate_professors/jordi_munoz/index.html @@ -2,4 +2,4 @@

Jordi Muñoz-Marí

\ No newline at end of file +

Jordi Muñoz-Marí

\ No newline at end of file diff --git a/people/full_professors/gustau_camps_valls/index.html b/people/full_professors/gustau_camps_valls/index.html index e5963427..299e32b3 100644 --- a/people/full_professors/gustau_camps_valls/index.html +++ b/people/full_professors/gustau_camps_valls/index.html @@ -2,4 +2,4 @@

Gustau Camps-Valls

\ No newline at end of file +

Gustau Camps-Valls

\ No newline at end of file diff --git a/people/full_professors/jesus_malo/ex_cathedra/index.html b/people/full_professors/jesus_malo/ex_cathedra/index.html index 62bddcb2..8d77c267 100644 --- a/people/full_professors/jesus_malo/ex_cathedra/index.html +++ b/people/full_professors/jesus_malo/ex_cathedra/index.html @@ -2,5 +2,5 @@

First words ex-cathedra

Jesús Malo (San Francisco, Starbucks at 390 Stockton St., February 2015)

Circa 2015, applications for full professorship in Spanish universities (cathedra) involved writing an essay to describe your career and personal views on science. Here is what I wrote to get the condition of Accredited University Professor from the official National Evaluation Agency…

Now (after the positive outcome in July 2015), I upload the version with uncensored pictures, full text, and over 150 hyperlinks!. These are my first words ex-cathedra (even though my salary, as well as the salary of over 2500 colleagues in the same situation, will remain the same for a while unless we do something):


Table of Contents


1. Why a physicist would ever care about Human Vision?

Think again: human vision is cool!

The leitmotif of my research and teaching activity is the study of visual information processing in the human brain. This is a biological and subjective problem: not very appealing adjectives for a big-bang theory guy. Nevertheless, the aspects of this problem that may be of interest for physicists determined the direction of my scientific career.

Despite the overuse of the word multidisciplinary, you have to consider that Visual Perception is a truly multidisciplinary problem. On the one hand, the input signal certainly involves plain Physics such as light emission and scattering in every-day scenes (classical Radiometry) and image formation in biological systems (classical Physiological Optics). However, on the other hand, the analysis of such input signal is a problem for Neuroscience: examples of the latter include the study of (natural) neural networks for image understanding. Human Vision is not at all limited to the laws of image formation, that basically date back to Newton classical Optics, but also include the formulation of laws that determine the organization of the sensors that make sense of these signals. And this is a quite different issue!. Regarding this analysis part, a theory that explains the visual cortex phenomena requires concepts coming from Statistics and Information Theory, or in nowadays jargon, Machine Learning. A particularly interesting feature of this problem is the fact that, as opposed to other science problems, the relation between maths and application (here Maths and Neuroscience) is not one-directional: in this case the system to be understood is actually a computing machine that may also inspire original mathematical approaches. Finally, the models coming from Theoretical Neuroscience may be applied in Electrical Engineering and Computer Science.

From a personal (and hence arguable) point of view, the Human Vision problem is interesting for a physicist not for the aspects related to classical Optics (fundamentally solved long ago), but for the study of the visual brain. Vision is not in the (well known) eye of the beholder, but in his/her (highly unknown) brain. The visual brain is a natural system with complex dynamics (the jargon physicists love), quantitative theories for partial explanations are very recent, and many of them are still under discussion. The study of Vision combines experiments, mathematical theories and technological applications, and this combination is the core of how the physicists approach the problems. It doesn’t matter that the experimental methods come from the Psychology, the Optometry or the Neurophysiology (all of them use the so called Psycho-Physics) or that the applications are in Image Processing and Computer Vision: the study of the Human Visual System is certainly quite appropriate for a physicist.

The fascination with the surprising behavior of the visual system is what determined my scientific exploration: over the last 20 years I made some contributions (or managed to introduce some colored noise 😉) in most of the disciplines cited above.


2. Chronological summary of my career

While Kuhn and Marx were kind of wrong, Sinatra was right: I did it my way!

Selecting a multidisciplinary problem implies having a wide range of collaborators over the years. The topics and the collaborators to address them are the parts of the scientific career that one can actually choose. Thomas Kuhn (or even Karl Marx) would certainly say that economic constraints sometimes impose their own choices. In my case, even though money sometimes determined the order in which I visited different aspects of the problem (e.g., applications before foundations), economic constraints didn’t imply modifications in the selected direction since I was fortunate enough to get steady funds along these two decades (more details on economic constraints below).

Constraints are usually harder in the teaching part since it is determined by the duties of the department where you happen to develop your research. Nevertheless, with some dedication, this part can also be modulated. Similarly to the research side (where I started at the Optics Department in the Physics School, but then I looked for collaborators in Maths, Electrical Engineering, and Computer Science), in the teaching side I decided to give lectures in PhD and Master programs out of my department (beyond the department-related duties). This was a way to convey the knowledge acquired in research activities to a broader audience.

Below is the list of multidisciplinary collaborators I found (or looked for) over time. Note that in the formative years and right after getting my first permanent position, I focused on applications (e.g., image coding) to maximize funding probabilities. More recently, particularly after my second Spanish NSF Project as PI, I turned to the fundamental issues (the theory and the consideration of a higher abstraction level, as for instance in the current Explora Project - my 4th as PI), yet still paying attention to technology transfer:

As a summary, despite the troubles of a truly multidisciplinary topic, the path has been (kind of) coherent and successful. At this point, I have to thank Prof. Jose María Artigas for his lectures on Physics of Vision: a small course for the physics students which (unfortunately!) is no longer available in my university. In those lessons, he told us about something completely different. As fresh and educative as the Monty Python for physics students.


3. My research contributions

Colored noise in vision sciences and some thoughts on the h-index

  • Experiments in vision science
  • Theory: empirical models in vision science
  • Theory: principled models in vision science (computational visual neuroscience)
  • Theory: statistical learning
  • Applications in image processing
  • Preliminary conclusions
  • Impact of the above: h-index or just colored noise?

Experiments in Vision Science (7 JCR publications)

I made experimental contributions in three aspects: Physiological Optics, Psychophysics, and Image Statistics. (i) In the field of Physiological Optics, we measured the optical transfer function of the lens+cornea system in-vivo Opth.Phys.Opt.97. This work received the European Vistakon Research Award 94’. (ii) In Psychophysics, we proposed simplified methods to measure the Contrast Sensitivity Function in all the frequency domain J.Opt.94, and a fast and accurate method to measure the parameters of multi-stage linear+nonlinear vision models Proc.SPIE15. Finally, (iii) in Image Statistics we gathered spatially and spectrally calibrated image samples to determine the properties of these signals and their variation under changes in illumination, contrast, and motion Im.Vis.Comp.00, Neur.Comp.12, IEEE-TGRS14, PLoS-ONE14, Rem.Sens.Im.Proc.11, Front.Neurosci.15.

Theory: empirical models in Vision Science (8 JCR publications)

We proposed mathematical descriptions of different visual dimensions: Texture, Color, and Motion. (i) We used wavelet representations to propose nonstationary Texture Vision models J.Mod.Opt.97, MScThesis95. (ii) We developed Color Vision models with illumination invariance that allow the reproduction of chromatic anomalies, adaptation, and aftereffects Vis.Res.97, J.Opt.96, J. Opt.98, JOSA04, Neur.Comp.12. (iii) Motion Vision models Alheteia08 focus on optical flow computation in perceptually relevant moving regions J.Vis.01, PhDThesis99, and explain the static motion aftereffect Front.Neurosci.15.

All these psychophysical and physiological models have a parallel linear+nonlinear structure where receptive fields and surround-dependent normalization play an important role.

Theory: principled models in Vision Science (12 JCR publications)

This category refers to the proposition of organization laws of sensory systems that explain empirical phenomena, showing how neural function is adapted to the statistics of visual stimuli. (i) We worked on the derivation of the linear properties of the sensors, finding that spatio-chromatic sensitivity, receptive field changes, and phase properties arise from optimal solutions to the adaptation problem under noise constraints and manifold matching PLoS-ONE14, IEEE-TGRS13, from statistical independence requirements LNCS11, NeuroImag.Meeting11, and from optimal estimation of object reflectance IEEE-TGRS14. (ii) We also derived the non-linear behavior of visual sensors like chromatic, texture, and motion sensors, linking non-linearities to optimal information transmission and/or error minimization in noisy systems Network06, Neur.Comp.12, Front.Neurosci.15, J.Opt.95, Im.Vis.Comp.00, LNCS00, Patt.Recog.03, Neur.Comp.10, LNCS10, NeuroImag.Meeting11.

Theory: Statistical Learning (7 JCR publications)

In theoretical neuroscience the derivation of properties of biological sensors from the regularities of visual scenes requires novel tools for statistical learning. In this field, we developed new techniques for unsupervised manifold learning, feature extraction (or symmetry detection in datasets), dimensionality reduction, probability density estimation, multi-information estimation, distance learning, and automatic adaptation from optimal dataset matching. Given my interest in applicability in Vision Science problems, I focused on techniques that can be explicitly represented in the image domain to be compared with receptive fields of visual neurons, as opposed to the usual practice in the Machine Learning community. Techniques include Rotation-based Iterative Gaussianization -RBIG- IEEE TNN 11, Sequential Principal Curves Analysis -SPCA- Network06, Neur.Comp.12, Front. Neurosci.15, Principal Polynomial Analysis -PPA- Int.J.Neur.Syst.14, Dimensionality Reduction based on Regression -DRR- IEEE JSTSP15, and Graph Matching for Adaptation IEEE TGRS13.

Applications: Image Processing (24 JCR publications + 1 patent)

We proposed original image processing techniques using both perception models and image statistics including (i) improvements of JPEG standard for image coding through nonlinear texture vision models Electr.Lett.95, Electr.Lett.99, IEEE TNN05, IEEE TIP06a, JMLR08, RPSP12, Patent08, (ii) improvements of MPEG standard for video coding with new perceptual quantization scheme and new motion estimation focused on perceptually relevant optical flow LNCS97, Electr.Lett.98, Electr.Lett.00a, Electr.Lett.00b, IEEE TIP01, Redund.Reduct.99, (iii) new image restoration techniques based on nonlinear contrast perception models and the image statistics in local frequency domains IEEE TIP 06b, JMLR10; (iv) new approaches to color constancy either based on relative chromatic descriptors Vis.Res.97, J.Opt.96, statistically-based chromatic adaptation models Neur.Comp.12, PLoS-ONE14, or Bayesian estimation of surface reflectance IEEE-TGRS14; (v) new subjective image and video distortion measures using nonlinear perception models Im.Vis.Comp.97, Disp.99, IEEE ICIP02, JOSA10, Proc.SPIE15; and (vi) image classification and knowledge extraction (or regression) based on our feature extraction techniques IEEE-TNN11, IEEE-TGRS13,Int.J.Neur.Syst.14, IEEE-JSTSP15. See code for image and video processing applications here.

Preliminary Conclusions

  • The visual brain is astonishingly well adapted to the natural visual world. This sentence shouldn’t be surprising for any teenager that heard about Charles Darwin. The cool thing in that conclusion was preparing accurate image data, developing the appropriate mathematical tools to derive the behavior described by computational models as seen in psychophysical illustrations. By putting all this together in a single piece of code you realize that the statement is true.

  • Appropriate (mathematical) formulation of visual phenomena is the only way to understand the problem and to derive applications. This statement is not very original either, given the famous Galileo Galilei quote [the book of nature is written in mathematical language]. However, in this multidisciplinary world, a special effort has to be done to translate physiological facts into models that work on, lets say, actual video sequences. By doing so, you transcend the specific details of a set of experiments, and think about all the additional problems faced (and solved) by the visual brain. Numerical simulations are useful to put a specific physiological behavior in perspective. Moreover, well-formulated models allow us to explore new experimental questions through the appropriate stimuli. Not to speak about the straightforward use in image processing and computer vision…

  • Nonlinear techniques are fancy, but it is amazing the percentage of reality that we can explain with linear models. Another old-fashion statement for eigenvector lovers. Besides, linear algebra is easy! For instance, a simple rotation (the Principal Component Analysis of Karl Pearson) applied to small patches of natural sequences, explains the major features of the receptive fields of LGN-V1 visual neurons. This includes opponent color coding, neurons tuned to spatial texture, and motion-sensitive neurons. Different kinds of simple affine transforms (linear scaling and translations) explain basic sensitivity to color, texture, and motion as well as the basic trends of adaptation. Amazing!

  • We roughly understand low-level visual information processing in the brain. However, there is still a long way to understand how we derive abstract concepts from low-level primitives. Not a surprising statement either if you saw the appropriate documentary or heard about David Marr. Despite all the knowledge about color, spatial texture, motion, and depth information processing in LGN, V1, and MT, little is known about how these pieces are put together in other parts of the brain (e.g. IT). What are the organization laws of these higher abstraction mechanisms? What about their relations to language? What about our ability to synthesize images (draw) from a written description?.

An educated teenager that heard about Darwin, Galileo, Pearson, and Marr (evolution, mathematical modeling, eigenvectors, decorrelation, and vision) could be disappointed by the simplicity of these conclusions. However, note that my claims can be louder now than 20 years ago because of the time spent in accumulating evidence (and writing this piece of code). I hope that the next 20 years are fruitful enough to make these conclusions stronger or (even better!) to change some of them.

Impact of Colored Noise in science libraries

As I told Eero Simoncelli once, while few people make a big impact on the scientific community, what others (including myself) do can be seen as injecting colored noise in the science libraries and the internet. Nevertheless, as argued below, that is not a major problem, but even something worth funding.

Some thoughts on JCR publications and the h-index

For ordinary (not-Nobel-laureate) people, research is mainly a personal learning experience. Such process starts with some childish initial curiosity and ends with refereed publication. It involves putting the question in context, saying something coherent about it, and convincing critical reviewers about the accuracy of such statement (no matter it is ground-breaking or not). Given the quality control imposed by peer review (particularly in high impact journals) the publishing-in-fine-journals exercise is one of the most comprehensive learning procedures ever developed. Even though the publications of the average scientist remain unknown or never make a global difference (i.e. low h-index), the rigor of the learning process in JCR-journals ensures this person has the deepest understanding of the issues. And this has a local impact in the dissemination of knowledge to others, either (local) students or (local) industries. A cohort of average scientists well trained through the publication process have to be there, ready to understand, confirm, disseminate and apply what (the few) original scientist happen to discover. In my view, that is the justification of devoting public money to fund average scientific research (or random colored noise generation ;-). Note that samples from colored noise do not distribute as a sphere, but collectively they point to a certain direction, hopefully the right one!.

For those of you who do not share this personal learning view, and love rankings better, here is the impact of my research (by July 2015, i.e. automatically outdated) according to my Google Scholar profile: my Hirsch index was 19, the total number of citations to my work was 876, so I was the 3rd most-cited scientist in the world in the (Google Scholar ;-) category of Image Statistics, the 28th one in Human Vision, the 87th in Visual Perception, and the 263rd in Vision. So what?

To me, the undeniable peak of my scientific career happened when a mild morning of February 2001, I left my office at the NASA Ames Research Center and drove my Toyota through the rocket wind tunnels to attend a talk at a nearby town in Silicon Valley on the vision abilities of HAL-9000, the famous computer of Stanley Kubrick’s 2001: A Space Odyssey. The combination of NASA, 2001, and HAL-9000 together really felt like big science. Particularly compared to my Spanish postdoc salary and housing prices of the dot-com bubble. Since that glorious morning, I felt like Dr. Dave Bowman for a second; everything else has been a steady decline.


4. My teaching activities: like Richard Dawkins in a Republican Convention

Why an optometrist (or engineer) would ever care about Maths (or Science)?

My teaching activity at the university spans over 19 years (only one less than my research activity). This means that I had to teach while obtaining my PhD. This undesirable situation happened since at that time (mid 90s) getting a PhD grant was restricted to students of professors having public funds (which was not the case of my advisor). Therefore, I stayed at the university only because (i) I won the European Vistakon Research Award (which I used to pay my PhD research for one year), and (ii) a new degree on Optometry and Vision Science was established at my university and it generated several openings for junior assistant professors.

The quality of my teaching over these two decades has been rated by my students according to the regulations in my university (in a scale of 5) as 3.6 ± 0.3, i.e. they gave me a positive rating with a small variance over the years.

My teaching activity has been modulated by (1) my interest in Vision Science, and (2) by having most of my teaching duties associated with the Degree and Master on Optometry and Vision Science. The correspondence between these two factors has been positive since it gave coherence to the research and teaching activities. However, the problem with Optometry students is that they imagine themselves as Medical Doctors (and you know that Evidence-Based Medicine is a recent field!). As a result, these students are not quite prepared for the practice of quantitative science (is there any non-quantitative science anyway?). This problem represented, (i) a challenge to convey the quantitative message to students with non-quantitative interests, and (ii) an incentive to diversify my teaching activity looking for students not scared by scalar products. The challenge posed by the non-quantitative students led to the development of Matlab tools such as COLORLAB, BasicVideoTools, and VirtualNeuroLabs, and new teaching methodologies [ProyDocente02] to convey the quantitative credo to students afraid of Maths. In this quantitative effort I found a lot of help and support from M.J. Luque and P. Capilla (respectively ;-) Sometimes I really feel as hopeless as Richard Dawkins at a Republican convention. But having a lot of fun, though! On second thoughts, my Optometry undergrads are not that bad: deficient education is always a problem of the teachers, not the students. Please excuse the trivial comparison with the creationists!

In order to diversify the audience, I also lectured in PhD and Master programs with Excellence distinction out of my Optics department, as for instance at the Applied Maths and Computer Science departments of my university, at the Institute of Applied Ophtalmo-Biology (Univ. Valladolid), and at the Institut de Robòtica i Informàtica Industrial (UPC). You can find slides, lecture notes and computer material for PhD courses here.

Finally, I have to mention my best (or more patient) students: those who dared to be advised by me in their PhD years: Irene Epifanio, Juan Gutiérrez, and Valero Laparra. They got doctorate degrees with a number of JCR publications, best PhD and Master Thesis awards in PhD programs with European Excellence distinction, etc… Nevertheless, the best is what I learned from them: thank you all, it was a lot of fun!.


5. Economic constraints of science in Spain

Why a positive evaluation for professorship does not imply an actual position in Spain?

Saying that “I did it my way despite the economic constraints” was an obvious literary license (for the evaluation committee). The truth is that my generation has been extremely lucky since Spain experienced an unprecedented window of opportunities for young scientists in the late 90s and early 2000s. In this short time window the economic effort started in the 80s (after we got democracy) to build a European-like science system, led to a mature public research system in the 90s (private sector didn’t go that fast). Favorable economic environment in the 90s and European funds steadily fueled this system and average scientific production in Spain achieved world-class level for the first time in history. In this situation it is easier to do it your way. It is fair to acknowledge that scientific freedom is a by-product of favorable conditions.

Maybe Marx wasn’t that wrong after all. Particularly considering how the situation has changed since the 2008 crisis. Sadly, the favorable time window may be closing in Spain (and in other places in southern Europe). Conservative governments in Spain do not see basic research as an investment for the future, but as a luxury you can disregard (Nature, Dec. 2011).

This short-sighted policy affects both young and senior scientists. Massive budget cuts reduce the possibility to get PhD students, and those who finally complete their PhD have small chances here. Postdocs are scarce and, for some years now, new associate professor positions are extremely rare. In the same vein, no new full professor position has been created since 2011, and retirement-related positions are only covered at a 50% rate. Before the crisis, the official Accreditation for University Professor (after a thorough independent review) used to be equivalent to getting an actual Professorship since there were no major funding problems. Now those days are over. The careers of accredited scholars (otherwise professors) are indefinitely truncated.

An association of Accredited University Professors (website in Spanish) was created to demand solutions for this unfair blocked-career situation (see the manifesto in English). Major worker unions CCOO and UGT support our demands. As in scientific research (see the colored noise concept) it is the collective action that defines the direction to go. Please sign up!


For further details on each of these sections, including my research contributions and teaching philosophy, I have included over 150 hyperlinks throughout the text, providing access to my full publications, tools, and additional resources.

Luis Gómez-Chova

\ No newline at end of file diff --git a/people/index.html b/people/index.html index 43724112..5ddcba3c 100644 --- a/people/index.html +++ b/people/index.html @@ -2,7 +2,7 @@

Full Professors

Image of Gustau Camps-Valls
Gustau Camps-Valls

My research is related to statistical learning for modeling and understanding the Earth system.

Full Professors

Image of Gustau Camps-Valls
Gustau Camps-Valls

My research is related to statistical learning for modeling and understanding the Earth system.

César Luis Aybar

\ No newline at end of file diff --git a/people/phd_students/cristina_radin/index.html b/people/phd_students/cristina_radin/index.html index 0e6ea861..e10e99d6 100644 --- a/people/phd_students/cristina_radin/index.html +++ b/people/phd_students/cristina_radin/index.html @@ -2,4 +2,4 @@

Cristina Radin

\ No newline at end of file +

Cristina Radin

\ No newline at end of file diff --git a/people/phd_students/dan_lopez/index.html b/people/phd_students/dan_lopez/index.html index 396d7d53..9dd03d5f 100644 --- a/people/phd_students/dan_lopez/index.html +++ b/people/phd_students/dan_lopez/index.html @@ -2,4 +2,4 @@

Dan López

\ No newline at end of file +

Dan López

\ No newline at end of file diff --git a/people/phd_students/deborah_bassotto/index.html b/people/phd_students/deborah_bassotto/index.html index 6284ace4..507aae7d 100644 --- a/people/phd_students/deborah_bassotto/index.html +++ b/people/phd_students/deborah_bassotto/index.html @@ -2,4 +2,4 @@

Deborah Bassotto

\ No newline at end of file +

Deborah Bassotto

\ No newline at end of file diff --git a/people/phd_students/enrique_portales/index.html b/people/phd_students/enrique_portales/index.html index 6256cfcd..cd8a52fa 100644 --- a/people/phd_students/enrique_portales/index.html +++ b/people/phd_students/enrique_portales/index.html @@ -2,4 +2,4 @@

Enrique Portalés

\ No newline at end of file +

Enrique Portalés

\ No newline at end of file diff --git a/people/phd_students/homer_durand/index.html b/people/phd_students/homer_durand/index.html index b77fe8db..7da39257 100644 --- a/people/phd_students/homer_durand/index.html +++ b/people/phd_students/homer_durand/index.html @@ -2,4 +2,4 @@

Homer Durand

\ No newline at end of file +

Homer Durand

\ No newline at end of file diff --git a/people/phd_students/inti_luna/index.html b/people/phd_students/inti_luna/index.html index 42ee7b8f..62a8bf5e 100644 --- a/people/phd_students/inti_luna/index.html +++ b/people/phd_students/inti_luna/index.html @@ -2,4 +2,4 @@

Inti Luna

\ No newline at end of file +

Inti Luna

\ No newline at end of file diff --git a/people/phd_students/is_this_you/index.html b/people/phd_students/is_this_you/index.html index 360e617b..1517a0c9 100644 --- a/people/phd_students/is_this_you/index.html +++ b/people/phd_students/is_this_you/index.html @@ -2,4 +2,4 @@

Is this you?

\ No newline at end of file +

Is this you?

\ No newline at end of file diff --git a/people/phd_students/javier_amaya/index.html b/people/phd_students/javier_amaya/index.html index 51d222dd..cbf5fd11 100644 --- a/people/phd_students/javier_amaya/index.html +++ b/people/phd_students/javier_amaya/index.html @@ -2,4 +2,4 @@

Javier Martínez Amaya

\ No newline at end of file +

Javier Martínez Amaya

\ No newline at end of file diff --git a/people/phd_students/jordi_cerda/index.html b/people/phd_students/jordi_cerda/index.html index f4b6058e..baa64088 100644 --- a/people/phd_students/jordi_cerda/index.html +++ b/people/phd_students/jordi_cerda/index.html @@ -2,4 +2,4 @@

Jordi Cerdà

\ No newline at end of file +

Jordi Cerdà

\ No newline at end of file diff --git a/people/phd_students/jordi_cortez/index.html b/people/phd_students/jordi_cortez/index.html index b4290f20..0def16fe 100644 --- a/people/phd_students/jordi_cortez/index.html +++ b/people/phd_students/jordi_cortez/index.html @@ -2,4 +2,4 @@

Jordi Cortes

\ No newline at end of file +

Jordi Cortes

\ No newline at end of file diff --git a/people/phd_students/jorge_garcia/index.html b/people/phd_students/jorge_garcia/index.html index 87b9ab7f..63b5e9a5 100644 --- a/people/phd_students/jorge_garcia/index.html +++ b/people/phd_students/jorge_garcia/index.html @@ -2,4 +2,4 @@

Jorge García

\ No newline at end of file +

Jorge García

\ No newline at end of file diff --git a/people/phd_students/jorge_vila/index.html b/people/phd_students/jorge_vila/index.html index 2e35af05..b25352d7 100644 --- a/people/phd_students/jorge_vila/index.html +++ b/people/phd_students/jorge_vila/index.html @@ -2,4 +2,4 @@

Jorge Vila Tomás

\ No newline at end of file +

Jorge Vila Tomás

\ No newline at end of file diff --git a/people/phd_students/jose_tarraga/index.html b/people/phd_students/jose_tarraga/index.html index 8fd4ff2d..881f62db 100644 --- a/people/phd_students/jose_tarraga/index.html +++ b/people/phd_students/jose_tarraga/index.html @@ -2,4 +2,4 @@

Jose Maria Tárraga

\ No newline at end of file +

Jose Maria Tárraga

\ No newline at end of file diff --git a/people/phd_students/kai_hendrik/index.html b/people/phd_students/kai_hendrik/index.html index 5a48b9f3..98ea774c 100644 --- a/people/phd_students/kai_hendrik/index.html +++ b/people/phd_students/kai_hendrik/index.html @@ -2,4 +2,4 @@

Kai-Hendrik Cohrs

\ No newline at end of file +

Kai-Hendrik Cohrs

\ No newline at end of file diff --git a/people/phd_students/laura_martinez/index.html b/people/phd_students/laura_martinez/index.html index 8a6b234d..700cde4a 100644 --- a/people/phd_students/laura_martinez/index.html +++ b/people/phd_students/laura_martinez/index.html @@ -2,4 +2,4 @@

Laura Martínez

\ No newline at end of file +

Laura Martínez

\ No newline at end of file diff --git a/people/phd_students/maria_gonzalez/index.html b/people/phd_students/maria_gonzalez/index.html index 51bb69b9..7eba27e4 100644 --- a/people/phd_students/maria_gonzalez/index.html +++ b/people/phd_students/maria_gonzalez/index.html @@ -2,4 +2,4 @@

Maria Gonzalez

\ No newline at end of file +

Maria Gonzalez

\ No newline at end of file diff --git a/people/phd_students/mengxue_zhang/index.html b/people/phd_students/mengxue_zhang/index.html index 8ab65c78..8f8f4885 100644 --- a/people/phd_students/mengxue_zhang/index.html +++ b/people/phd_students/mengxue_zhang/index.html @@ -2,4 +2,4 @@

Mengxue Zhang

\ No newline at end of file +

Mengxue Zhang

\ No newline at end of file diff --git a/people/phd_students/moritz_link/index.html b/people/phd_students/moritz_link/index.html index 13b216ca..1ed7b5cd 100644 --- a/people/phd_students/moritz_link/index.html +++ b/people/phd_students/moritz_link/index.html @@ -2,4 +2,4 @@

Moritz Link

\ No newline at end of file +

Moritz Link

\ No newline at end of file diff --git a/people/phd_students/pablo_hernandez_camara/index.html b/people/phd_students/pablo_hernandez_camara/index.html index 25781d50..96c04d4b 100644 --- a/people/phd_students/pablo_hernandez_camara/index.html +++ b/people/phd_students/pablo_hernandez_camara/index.html @@ -2,4 +2,4 @@

Pablo Hernández Cámara

\ No newline at end of file +

Pablo Hernández Cámara

\ No newline at end of file diff --git a/people/phd_students/paolo_pelucchi/index.html b/people/phd_students/paolo_pelucchi/index.html index dc874ffd..3b3888d9 100644 --- a/people/phd_students/paolo_pelucchi/index.html +++ b/people/phd_students/paolo_pelucchi/index.html @@ -2,4 +2,4 @@

Paolo Pelucchi

\ No newline at end of file +

Paolo Pelucchi

\ No newline at end of file diff --git a/people/phd_students/simon_donike/index.html b/people/phd_students/simon_donike/index.html index 2345186a..437b8355 100644 --- a/people/phd_students/simon_donike/index.html +++ b/people/phd_students/simon_donike/index.html @@ -2,4 +2,4 @@

Simon Donike

\ No newline at end of file +

Simon Donike

\ No newline at end of file diff --git a/people/phd_students/tristan_williams/index.html b/people/phd_students/tristan_williams/index.html index e8141253..e7d8df17 100644 --- a/people/phd_students/tristan_williams/index.html +++ b/people/phd_students/tristan_williams/index.html @@ -2,4 +2,4 @@

Tristan Williams

\ No newline at end of file +

Tristan Williams

\ No newline at end of file diff --git a/people/postdocs/emiliano_diaz/index.html b/people/postdocs/emiliano_diaz/index.html index 6adfeb6d..4fcdb6e6 100644 --- a/people/postdocs/emiliano_diaz/index.html +++ b/people/postdocs/emiliano_diaz/index.html @@ -2,4 +2,4 @@

Emiliano Díaz

\ No newline at end of file +

Emiliano Díaz

\ No newline at end of file diff --git a/people/postdocs/eva_sevillano/index.html b/people/postdocs/eva_sevillano/index.html index f7cfb0c2..eb7ba1b8 100644 --- a/people/postdocs/eva_sevillano/index.html +++ b/people/postdocs/eva_sevillano/index.html @@ -2,4 +2,4 @@

Eva Sevillano Marco

\ No newline at end of file +

Eva Sevillano Marco

\ No newline at end of file diff --git a/people/postdocs/gonzalo_garcia/index.html b/people/postdocs/gonzalo_garcia/index.html index 81f77ae2..3c8ad7aa 100644 --- a/people/postdocs/gonzalo_garcia/index.html +++ b/people/postdocs/gonzalo_garcia/index.html @@ -2,4 +2,4 @@

Gonzalo Mateo-García

\ No newline at end of file +

Gonzalo Mateo-García

\ No newline at end of file diff --git a/people/postdocs/is_this_you/index.html b/people/postdocs/is_this_you/index.html index 360e617b..1517a0c9 100644 --- a/people/postdocs/is_this_you/index.html +++ b/people/postdocs/is_this_you/index.html @@ -2,4 +2,4 @@

Is this you?

\ No newline at end of file +

Is this you?

\ No newline at end of file diff --git a/people/postdocs/ivan_sanlop/index.html b/people/postdocs/ivan_sanlop/index.html index d862f93d..d99a90bb 100644 --- a/people/postdocs/ivan_sanlop/index.html +++ b/people/postdocs/ivan_sanlop/index.html @@ -2,4 +2,4 @@

Iván Sánchez-López

\ No newline at end of file +

Iván Sánchez-López

\ No newline at end of file diff --git a/people/postdocs/jorge_vicent/index.html b/people/postdocs/jorge_vicent/index.html index bfdb626a..a12f0214 100644 --- a/people/postdocs/jorge_vicent/index.html +++ b/people/postdocs/jorge_vicent/index.html @@ -2,4 +2,4 @@

Jorge Vicent Servera

\ No newline at end of file +

Jorge Vicent Servera

\ No newline at end of file diff --git a/people/postdocs/miguel_fernandez_torres/index.html b/people/postdocs/miguel_fernandez_torres/index.html index 7c0638d3..34551cf8 100644 --- a/people/postdocs/miguel_fernandez_torres/index.html +++ b/people/postdocs/miguel_fernandez_torres/index.html @@ -2,4 +2,4 @@

Miguel Ángel Fernández Torres

\ No newline at end of file +

Miguel Ángel Fernández Torres

\ No newline at end of file diff --git a/people/postdocs/nate_mankovich/index.html b/people/postdocs/nate_mankovich/index.html index 7f5b3998..62ced773 100644 --- a/people/postdocs/nate_mankovich/index.html +++ b/people/postdocs/nate_mankovich/index.html @@ -2,4 +2,4 @@

Nate Mankovich

\ No newline at end of file +

Nate Mankovich

\ No newline at end of file diff --git a/people/postdocs/oscar_pellicer/index.html b/people/postdocs/oscar_pellicer/index.html index 31862fe4..881cfce9 100644 --- a/people/postdocs/oscar_pellicer/index.html +++ b/people/postdocs/oscar_pellicer/index.html @@ -2,4 +2,4 @@

Óscar Pellicer

\ No newline at end of file +

Óscar Pellicer

\ No newline at end of file diff --git a/people/postdocs/varando/index.html b/people/postdocs/varando/index.html index 82dbec84..88acc189 100644 --- a/people/postdocs/varando/index.html +++ b/people/postdocs/varando/index.html @@ -2,4 +2,4 @@

Gherardo Varando

\ No newline at end of file +

Gherardo Varando

\ No newline at end of file diff --git a/people/postdocs/vassilis_sitokonstantinou/index.html b/people/postdocs/vassilis_sitokonstantinou/index.html index a142f072..4c976abd 100644 --- a/people/postdocs/vassilis_sitokonstantinou/index.html +++ b/people/postdocs/vassilis_sitokonstantinou/index.html @@ -2,4 +2,4 @@

Vassilis Sitokonstantinou

\ No newline at end of file +

Vassilis Sitokonstantinou

\ No newline at end of file diff --git a/people/senior_research_scientists/alvaro_moreno/index.html b/people/senior_research_scientists/alvaro_moreno/index.html index a7464bef..a54b4a47 100644 --- a/people/senior_research_scientists/alvaro_moreno/index.html +++ b/people/senior_research_scientists/alvaro_moreno/index.html @@ -2,4 +2,4 @@

Álvaro Moreno Martínez

\ No newline at end of file +

Álvaro Moreno Martínez

\ No newline at end of file diff --git a/people/senior_research_scientists/veronica_nieves/index.html b/people/senior_research_scientists/veronica_nieves/index.html index a983d694..d459b014 100644 --- a/people/senior_research_scientists/veronica_nieves/index.html +++ b/people/senior_research_scientists/veronica_nieves/index.html @@ -2,4 +2,4 @@

Veronica Nieves - Distinguished Researcher

\ No newline at end of file +

Veronica Nieves - Distinguished Researcher

\ No newline at end of file diff --git a/people/visitors/alice_re/index.html b/people/visitors/alice_re/index.html index ead1431a..d6e97952 100644 --- a/people/visitors/alice_re/index.html +++ b/people/visitors/alice_re/index.html @@ -2,4 +2,4 @@

Alice Re

\ No newline at end of file +

Alice Re

\ No newline at end of file diff --git a/people/visitors/chen_ma/index.html b/people/visitors/chen_ma/index.html index 35f5763a..d834187c 100644 --- a/people/visitors/chen_ma/index.html +++ b/people/visitors/chen_ma/index.html @@ -2,4 +2,4 @@

Chen Ma

\ No newline at end of file +

Chen Ma

\ No newline at end of file diff --git a/people/visitors/fernando_iglesias/index.html b/people/visitors/fernando_iglesias/index.html index 58cb6c1b..0994f871 100644 --- a/people/visitors/fernando_iglesias/index.html +++ b/people/visitors/fernando_iglesias/index.html @@ -2,4 +2,4 @@

Fernando Iglesias

\ No newline at end of file +

Fernando Iglesias

\ No newline at end of file diff --git a/people/visitors/francesco_martinuzzi/index.html b/people/visitors/francesco_martinuzzi/index.html index 4330576f..4d75b094 100644 --- a/people/visitors/francesco_martinuzzi/index.html +++ b/people/visitors/francesco_martinuzzi/index.html @@ -2,4 +2,4 @@

Francesco Martinuzzi

\ No newline at end of file +

Francesco Martinuzzi

\ No newline at end of file diff --git a/people/visitors/ioannis_prapas/index.html b/people/visitors/ioannis_prapas/index.html index 154a6b69..999580eb 100644 --- a/people/visitors/ioannis_prapas/index.html +++ b/people/visitors/ioannis_prapas/index.html @@ -2,4 +2,4 @@

Ioannis Prapas

\ No newline at end of file +

Ioannis Prapas

\ No newline at end of file diff --git a/people/visitors/is_this_you/index.html b/people/visitors/is_this_you/index.html index 360e617b..1517a0c9 100644 --- a/people/visitors/is_this_you/index.html +++ b/people/visitors/is_this_you/index.html @@ -2,4 +2,4 @@

Is this you?

\ No newline at end of file +

Is this you?

\ No newline at end of file diff --git a/people/visitors/jessenia_gonzalez/index.html b/people/visitors/jessenia_gonzalez/index.html index ac5c24a8..64e9c10c 100644 --- a/people/visitors/jessenia_gonzalez/index.html +++ b/people/visitors/jessenia_gonzalez/index.html @@ -2,4 +2,4 @@

Jessenia Gonzalez

\ No newline at end of file +

Jessenia Gonzalez

\ No newline at end of file diff --git a/people/visitors/shahine_bouabid/index.html b/people/visitors/shahine_bouabid/index.html index 3b71d2d6..fff4b568 100644 --- a/people/visitors/shahine_bouabid/index.html +++ b/people/visitors/shahine_bouabid/index.html @@ -2,4 +2,4 @@

Shahine Bouabid

\ No newline at end of file +

Shahine Bouabid

\ No newline at end of file diff --git a/posts/index.html b/posts/index.html index 251401f1..ac55bcd3 100644 --- a/posts/index.html +++ b/posts/index.html @@ -2,6 +2,6 @@

Tempor est exercitation ad qui pariatur quis adipisicing aliquip nisi ea consequat ipsum occaecat. Nostrud consequat ullamco laboris fugiat esse esse adipisicing velit laborum ipsum incididunt ut enim. Dolor pariatur nulla quis fugiat dolore excepteur. Aliquip ad quis aliqua enim do consequat.

Post 3

Occaecat aliqua consequat laborum ut ex aute aliqua culpa quis irure esse magna dolore quis. Proident fugiat labore eu laboris officia Lorem enim. Ipsum occaecat cillum ut tempor id sint aliqua incididunt nisi incididunt reprehenderit. Voluptate ad minim sint est aute aliquip esse occaecat tempor officia qui sunt. Aute ex ipsum id ut in est velit est laborum incididunt. Aliqua qui id do esse sunt eiusmod id deserunt eu nostrud aute sit ipsum.

Post 2

Anim eiusmod irure incididunt sint cupidatat. Incididunt irure irure irure nisi ipsum do ut quis fugiat consectetur proident cupidatat incididunt cillum. Dolore voluptate occaecat qui mollit laborum ullamco et. Ipsum laboris officia anim laboris culpa eiusmod ex magna ex cupidatat anim ipsum aute. Mollit aliquip occaecat qui sunt velit ut cupidatat reprehenderit enim sunt laborum. Velit veniam in officia nulla adipisicing ut duis officia. -Exercitation voluptate irure in irure tempor mollit Lorem nostrud ad officia.

Post 1

Tempor proident minim aliquip reprehenderit dolor et ad anim Lorem duis sint eiusmod. Labore ut ea duis dolor. Incididunt consectetur proident qui occaecat incididunt do nisi Lorem. Tempor do laborum elit laboris excepteur eiusmod do. Eiusmod nisi excepteur ut amet pariatur adipisicing Lorem. +

Tempor est exercitation ad qui pariatur quis adipisicing aliquip nisi ea consequat ipsum occaecat. Nostrud consequat ullamco laboris fugiat esse esse adipisicing velit laborum ipsum incididunt ut enim. Dolor pariatur nulla quis fugiat dolore excepteur. Aliquip ad quis aliqua enim do consequat.

Post 3

Occaecat aliqua consequat laborum ut ex aute aliqua culpa quis irure esse magna dolore quis. Proident fugiat labore eu laboris officia Lorem enim. Ipsum occaecat cillum ut tempor id sint aliqua incididunt nisi incididunt reprehenderit. Voluptate ad minim sint est aute aliquip esse occaecat tempor officia qui sunt. Aute ex ipsum id ut in est velit est laborum incididunt. Aliqua qui id do esse sunt eiusmod id deserunt eu nostrud aute sit ipsum.

Post 2

Anim eiusmod irure incididunt sint cupidatat. Incididunt irure irure irure nisi ipsum do ut quis fugiat consectetur proident cupidatat incididunt cillum. Dolore voluptate occaecat qui mollit laborum ullamco et. Ipsum laboris officia anim laboris culpa eiusmod ex magna ex cupidatat anim ipsum aute. Mollit aliquip occaecat qui sunt velit ut cupidatat reprehenderit enim sunt laborum. Velit veniam in officia nulla adipisicing ut duis officia. +Exercitation voluptate irure in irure tempor mollit Lorem nostrud ad officia.

Post 1

Tempor proident minim aliquip reprehenderit dolor et ad anim Lorem duis sint eiusmod. Labore ut ea duis dolor. Incididunt consectetur proident qui occaecat incididunt do nisi Lorem. Tempor do laborum elit laboris excepteur eiusmod do. Eiusmod nisi excepteur ut amet pariatur adipisicing Lorem. Occaecat nulla excepteur dolore excepteur duis eiusmod ullamco officia anim in voluptate ea occaecat officia. Cillum sint esse velit ea officia minim fugiat. Elit ea esse id aliquip pariatur cupidatat id duis minim incididunt ea ea.
\ No newline at end of file diff --git a/posts/index.xml b/posts/index.xml index c3182560..22d33256 100644 --- a/posts/index.xml +++ b/posts/index.xml @@ -1,3 +1,3 @@ -Posts on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/posts/Recent content in Posts on ISP - Image and Signal Processing groupHugoen-usWed, 15 Mar 2023 11:00:00 -0700Post 3https://ipl-uv.github.io/posts/post-3/Wed, 15 Mar 2023 11:00:00 -0700https://ipl-uv.github.io/posts/post-3/Occaecat aliqua consequat laborum ut ex aute aliqua culpa quis irure esse magna dolore quis. Proident fugiat labore eu laboris officia Lorem enim. Ipsum occaecat cillum ut tempor id sint aliqua incididunt nisi incididunt reprehenderit. Voluptate ad minim sint est aute aliquip esse occaecat tempor officia qui sunt. Aute ex ipsum id ut in est velit est laborum incididunt. Aliqua qui id do esse sunt eiusmod id deserunt eu nostrud aute sit ipsum.Post 2https://ipl-uv.github.io/posts/post-2/Wed, 15 Feb 2023 10:00:00 -0700https://ipl-uv.github.io/posts/post-2/Anim eiusmod irure incididunt sint cupidatat. Incididunt irure irure irure nisi ipsum do ut quis fugiat consectetur proident cupidatat incididunt cillum. Dolore voluptate occaecat qui mollit laborum ullamco et. Ipsum laboris officia anim laboris culpa eiusmod ex magna ex cupidatat anim ipsum aute. Mollit aliquip occaecat qui sunt velit ut cupidatat reprehenderit enim sunt laborum. Velit veniam in officia nulla adipisicing ut duis officia. -Exercitation voluptate irure in irure tempor mollit Lorem nostrud ad officia.Post 1https://ipl-uv.github.io/posts/post-1/Sun, 15 Jan 2023 09:00:00 -0700https://ipl-uv.github.io/posts/post-1/Tempor proident minim aliquip reprehenderit dolor et ad anim Lorem duis sint eiusmod. Labore ut ea duis dolor. Incididunt consectetur proident qui occaecat incididunt do nisi Lorem. Tempor do laborum elit laboris excepteur eiusmod do. Eiusmod nisi excepteur ut amet pariatur adipisicing Lorem. 
+Posts on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/posts/Recent content in Posts on ISP - Image and Signal Processing groupHugoen-usWed, 15 Mar 2023 11:00:00 -0700Post 3https://isp.uv.es/github/posts/post-3/Wed, 15 Mar 2023 11:00:00 -0700https://isp.uv.es/github/posts/post-3/Occaecat aliqua consequat laborum ut ex aute aliqua culpa quis irure esse magna dolore quis. Proident fugiat labore eu laboris officia Lorem enim. Ipsum occaecat cillum ut tempor id sint aliqua incididunt nisi incididunt reprehenderit. Voluptate ad minim sint est aute aliquip esse occaecat tempor officia qui sunt. Aute ex ipsum id ut in est velit est laborum incididunt. Aliqua qui id do esse sunt eiusmod id deserunt eu nostrud aute sit ipsum.Post 2https://isp.uv.es/github/posts/post-2/Wed, 15 Feb 2023 10:00:00 -0700https://isp.uv.es/github/posts/post-2/Anim eiusmod irure incididunt sint cupidatat. Incididunt irure irure irure nisi ipsum do ut quis fugiat consectetur proident cupidatat incididunt cillum. Dolore voluptate occaecat qui mollit laborum ullamco et. Ipsum laboris officia anim laboris culpa eiusmod ex magna ex cupidatat anim ipsum aute. Mollit aliquip occaecat qui sunt velit ut cupidatat reprehenderit enim sunt laborum. Velit veniam in officia nulla adipisicing ut duis officia. +Exercitation voluptate irure in irure tempor mollit Lorem nostrud ad officia.Post 1https://isp.uv.es/github/posts/post-1/Sun, 15 Jan 2023 09:00:00 -0700https://isp.uv.es/github/posts/post-1/Tempor proident minim aliquip reprehenderit dolor et ad anim Lorem duis sint eiusmod. Labore ut ea duis dolor. Incididunt consectetur proident qui occaecat incididunt do nisi Lorem. Tempor do laborum elit laboris excepteur eiusmod do. Eiusmod nisi excepteur ut amet pariatur adipisicing Lorem. Occaecat nulla excepteur dolore excepteur duis eiusmod ullamco officia anim in voluptate ea occaecat officia. Cillum sint esse velit ea officia minim fugiat. 
Elit ea esse id aliquip pariatur cupidatat id duis minim incididunt ea ea. \ No newline at end of file diff --git a/posts/post-1/index.html b/posts/post-1/index.html index 949e1181..f9b89518 100644 --- a/posts/post-1/index.html +++ b/posts/post-1/index.html @@ -2,4 +2,4 @@

Tempor proident minim aliquip reprehenderit dolor et ad anim Lorem duis sint eiusmod. Labore ut ea duis dolor. Incididunt consectetur proident qui occaecat incididunt do nisi Lorem. Tempor do laborum elit laboris excepteur eiusmod do. Eiusmod nisi excepteur ut amet pariatur adipisicing Lorem.

Occaecat nulla excepteur dolore excepteur duis eiusmod ullamco officia anim in voluptate ea occaecat officia. Cillum sint esse velit ea officia minim fugiat. Elit ea esse id aliquip pariatur cupidatat id duis minim incididunt ea ea. Anim ut duis sunt nisi. Culpa cillum sit voluptate voluptate eiusmod dolor. Enim nisi Lorem ipsum irure est excepteur voluptate eu in enim nisi. Nostrud ipsum Lorem anim sint labore consequat do.

\ No newline at end of file +

Tempor proident minim aliquip reprehenderit dolor et ad anim Lorem duis sint eiusmod. Labore ut ea duis dolor. Incididunt consectetur proident qui occaecat incididunt do nisi Lorem. Tempor do laborum elit laboris excepteur eiusmod do. Eiusmod nisi excepteur ut amet pariatur adipisicing Lorem.

Occaecat nulla excepteur dolore excepteur duis eiusmod ullamco officia anim in voluptate ea occaecat officia. Cillum sint esse velit ea officia minim fugiat. Elit ea esse id aliquip pariatur cupidatat id duis minim incididunt ea ea. Anim ut duis sunt nisi. Culpa cillum sit voluptate voluptate eiusmod dolor. Enim nisi Lorem ipsum irure est excepteur voluptate eu in enim nisi. Nostrud ipsum Lorem anim sint labore consequat do.

\ No newline at end of file diff --git a/posts/post-2/index.html b/posts/post-2/index.html index 21643260..440840f6 100644 --- a/posts/post-2/index.html +++ b/posts/post-2/index.html @@ -2,4 +2,4 @@

Anim eiusmod irure incididunt sint cupidatat. Incididunt irure irure irure nisi ipsum do ut quis fugiat consectetur proident cupidatat incididunt cillum. Dolore voluptate occaecat qui mollit laborum ullamco et. Ipsum laboris officia anim laboris culpa eiusmod ex magna ex cupidatat anim ipsum aute. Mollit aliquip occaecat qui sunt velit ut cupidatat reprehenderit enim sunt laborum. Velit veniam in officia nulla adipisicing ut duis officia.

Exercitation voluptate irure in irure tempor mollit Lorem nostrud ad officia. Velit id fugiat occaecat do tempor. Sit officia Lorem aliquip eu deserunt consectetur. Aute proident deserunt in nulla aliquip dolore ipsum Lorem ut cupidatat consectetur sit sint laborum. Esse cupidatat sit sint sunt tempor exercitation deserunt. Labore dolor duis laborum est do nisi ut veniam dolor et nostrud nostrud.

\ No newline at end of file +

Anim eiusmod irure incididunt sint cupidatat. Incididunt irure irure irure nisi ipsum do ut quis fugiat consectetur proident cupidatat incididunt cillum. Dolore voluptate occaecat qui mollit laborum ullamco et. Ipsum laboris officia anim laboris culpa eiusmod ex magna ex cupidatat anim ipsum aute. Mollit aliquip occaecat qui sunt velit ut cupidatat reprehenderit enim sunt laborum. Velit veniam in officia nulla adipisicing ut duis officia.

Exercitation voluptate irure in irure tempor mollit Lorem nostrud ad officia. Velit id fugiat occaecat do tempor. Sit officia Lorem aliquip eu deserunt consectetur. Aute proident deserunt in nulla aliquip dolore ipsum Lorem ut cupidatat consectetur sit sint laborum. Esse cupidatat sit sint sunt tempor exercitation deserunt. Labore dolor duis laborum est do nisi ut veniam dolor et nostrud nostrud.

\ No newline at end of file diff --git a/posts/post-3/index.html b/posts/post-3/index.html index 3270cdb5..d1a88566 100644 --- a/posts/post-3/index.html +++ b/posts/post-3/index.html @@ -2,4 +2,4 @@

Occaecat aliqua consequat laborum ut ex aute aliqua culpa quis irure esse magna dolore quis. Proident fugiat labore eu laboris officia Lorem enim. Ipsum occaecat cillum ut tempor id sint aliqua incididunt nisi incididunt reprehenderit. Voluptate ad minim sint est aute aliquip esse occaecat tempor officia qui sunt. Aute ex ipsum id ut in est velit est laborum incididunt. Aliqua qui id do esse sunt eiusmod id deserunt eu nostrud aute sit ipsum. Deserunt esse cillum Lorem non magna adipisicing mollit amet consequat.

Bryce Canyon National Park

Sit excepteur do velit veniam mollit in nostrud laboris incididunt ea. Amet eu cillum ut reprehenderit culpa aliquip labore laborum amet sit sit duis. Laborum id proident nostrud dolore laborum reprehenderit quis mollit nulla amet veniam officia id id. Aliquip in deserunt qui magna duis qui pariatur officia sunt deserunt.

\ No newline at end of file +

Occaecat aliqua consequat laborum ut ex aute aliqua culpa quis irure esse magna dolore quis. Proident fugiat labore eu laboris officia Lorem enim. Ipsum occaecat cillum ut tempor id sint aliqua incididunt nisi incididunt reprehenderit. Voluptate ad minim sint est aute aliquip esse occaecat tempor officia qui sunt. Aute ex ipsum id ut in est velit est laborum incididunt. Aliqua qui id do esse sunt eiusmod id deserunt eu nostrud aute sit ipsum. Deserunt esse cillum Lorem non magna adipisicing mollit amet consequat.

Bryce Canyon National Park

Sit excepteur do velit veniam mollit in nostrud laboris incididunt ea. Amet eu cillum ut reprehenderit culpa aliquip labore laborum amet sit sit duis. Laborum id proident nostrud dolore laborum reprehenderit quis mollit nulla amet veniam officia id id. Aliquip in deserunt qui magna duis qui pariatur officia sunt deserunt.

\ No newline at end of file diff --git a/projects/current/ai4pex/index.html b/projects/current/ai4pex/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/ai4pex/index.html +++ b/projects/current/ai4pex/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/ai_complex/index.html b/projects/current/ai_complex/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/ai_complex/index.html +++ b/projects/current/ai_complex/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/causal_inference/index.html b/projects/current/causal_inference/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/causal_inference/index.html +++ b/projects/current/causal_inference/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/cimr/index.html b/projects/current/cimr/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/cimr/index.html +++ b/projects/current/cimr/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/deep_cloud/index.html b/projects/current/deep_cloud/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/deep_cloud/index.html +++ b/projects/current/deep_cloud/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/deep_extremes/index.html b/projects/current/deep_extremes/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/deep_extremes/index.html +++ b/projects/current/deep_extremes/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/deepcube/index.html b/projects/current/deepcube/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/deepcube/index.html +++ b/projects/current/deepcube/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/elias/index.html b/projects/current/elias/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/elias/index.html +++ b/projects/current/elias/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/elise/index.html b/projects/current/elise/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/elise/index.html +++ b/projects/current/elise/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/ellis/index.html b/projects/current/ellis/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/ellis/index.html +++ b/projects/current/ellis/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/hermes/index.html b/projects/current/hermes/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/hermes/index.html +++ b/projects/current/hermes/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/i_aida/index.html b/projects/current/i_aida/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/i_aida/index.html +++ b/projects/current/i_aida/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/imiracli/index.html b/projects/current/imiracli/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/imiracli/index.html +++ b/projects/current/imiracli/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/maloc/index.html b/projects/current/maloc/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/maloc/index.html +++ b/projects/current/maloc/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/maloph/index.html b/projects/current/maloph/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/maloph/index.html +++ b/projects/current/maloph/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/meditwin/index.html b/projects/current/meditwin/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/meditwin/index.html +++ b/projects/current/meditwin/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/opensr/index.html b/projects/current/opensr/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/opensr/index.html +++ b/projects/current/opensr/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/thinkingearth/index.html b/projects/current/thinkingearth/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/thinkingearth/index.html +++ b/projects/current/thinkingearth/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/usmile/index.html b/projects/current/usmile/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/usmile/index.html +++ b/projects/current/usmile/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/current/xaida/index.html b/projects/current/xaida/index.html index b48c5abd..b87a1443 100644 --- a/projects/current/xaida/index.html +++ b/projects/current/xaida/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/index.html b/projects/index.html index 1453f4da..7ca79285 100644 --- a/projects/index.html +++ b/projects/index.html @@ -2,4 +2,4 @@

Current projects

Image of AI for complex systems: Brain, Earth, Climate, SocietyAI for complex systems: Brain, Earth, Climate, Society
UV-PIs: G. Camps-Valls, M. Piles
Duration: (2022-2026)
Funding Source: Generalitat Valenciana - PROMETEO programme
Image of AI4PEX: Artificial Intelligence for enhanced representation of processes and extremes in Earth System ModelsAI4PEX: Artificial Intelligence for enhanced representation of processes and extremes in Earth System Models
UV-PIs: Gustau Camps-Valls, Maria Piles
Duration: (2024-2027)
Image of Causal Inference to Understand the Impact of Humanitarian Interventions on Food Security in AfricaCausal Inference to Understand the Impact of Humanitarian Interventions on Food Security in Africa
UV-PIs: Gustau Camps-Valls, Gherardo Varando
PIs: Jose Maria Tarraga
Duration: (2022-2024)
Funding Source: Microsoft Research - Microsoft Climate Research Initiative
Image of CIMR-DEVALGO: CIMR Level-2 Algorithm DevelopmentCIMR-DEVALGO: CIMR Level-2 Algorithm Development
UV-PIs: M. Piles
PIs: Thomas Lavergne (Norwegian Meteorological Institute)
Duration: (2023-2024)
Funding Source: ESA ITT
Partners: Norwegian Meteorological Institute
Image of DEEPCLOUD: Deep learning tools for operational cloud detection in Earth observation satellite images.DEEPCLOUD: Deep learning tools for operational cloud detection in Earth observation satellite images.
UV-PIs: L. Gomez-Chova
Duration: (2020-2024)
Funding Source: Spanish Ministry of Science, Innovation and Universities (MCIU/AEI/FEDER, UE), PID2019-109026RB-I00
Image of DeepCube: Explainable AI pipelines for big Copernicus dataDeepCube: Explainable AI pipelines for big Copernicus data
UV-PIs: Gustau Camps-Valls, M. Piles
Duration: (2021-2024)
Funding Source: EU H2020
Project Type: Research Project
Image of DeepExtremes: Multi-Hazards, Compounds and Cascade eventsDeepExtremes: Multi-Hazards, Compounds and Cascade events
UV-PIs: G. Camps-Valls
Duration: (2022--)
Funding Source: AI for Science 2021
Image of ELIAS: European Lighthouse of AI for SustainabilityELIAS: European Lighthouse of AI for Sustainability
UV-PIs: Gustau Camps-Valls
Duration: (2023-2026)
Project Type: Research Project
Image of ELISE: European Learning And Intelligent Systems ExcellenceELISE: European Learning And Intelligent Systems Excellence
UV-PIs: Gustau Camps-Valls
Duration: (2020-2023)
Funding Source: ICT-48
Project Type: Research Project
Image of ELLIS - European Laboratory for Learning and Intelligent SystemsELLIS - European Laboratory for Learning and Intelligent Systems
UV-PIs: Gustau Camps-Valls
PIs: Markus Reichstein
Duration: (2019-)
Role: Universitat de València co-coordinates the research program "Machine Learning for the Earth and Climate Sciences"
Project Type: Research Program
Image of HERMES: Hybrid Estimation and Remote Sensing Monitoring of Evaporation and Soil MoistureHERMES: Hybrid Estimation and Remote Sensing Monitoring of Evaporation and Soil Moisture
UV-PIs: Gustau Camps-Valls, Alvaro Moreno
PIs: Diego Miralles (Uni Ghent), Miguel Mahecha (Uni Leipzig)
Duration: (2023-2024)
Funding Source: BELSPO Stereo IV Research program
Partners: Uni Ghent, Uni Leipzig
Image of i-AIDA: International AI Doctoral Academyi-AIDA: International AI Doctoral Academy
UV-PIs: G. Camps-Valls
Duration: (2022-)
Funding Source: ICT-48
Image of iMIRACLI: innovative MachIne leaRning to constrain Aerosol-cloud CLimate ImpactsiMIRACLI: innovative MachIne leaRning to constrain Aerosol-cloud CLimate Impacts
PIs: G. Camps-Valls
Duration: (2020-2023)
Funding Source: ETN Marie Curie Training Network
Project Type: Training Network
Image of MALOC: MAchine Learning for assessing Ocean ClimateMALOC: MAchine Learning for assessing Ocean Climate
UV-PIs: V. Nieves
Duration: (2020-2024)
Funding Source: Generalitat Valenciana - Plan GenT (CIDEGENT/2019/055)
Image of MALOPH: A novel MAchine Learning based perspective to identify and model Ocean Precursors to extreme Hurricane developmentMALOPH: A novel MAchine Learning based perspective to identify and model Ocean Precursors to extreme Hurricane development
UV-PIs: V. Nieves
Duration: (2021-2024)
Funding Source: European Space Agency - Open Discovery Ideas Channel
Image of MediTwin: Mediterranean Digital Twin Network for Understanding Climate ExtremesMediTwin: Mediterranean Digital Twin Network for Understanding Climate Extremes
UV-PIs: Gustau Camps-Valls
Duration: (2024-2027)
Image of OpenSR — Robust, accountable super-resolution for Sentinel-2 and beyondOpenSR — Robust, accountable super-resolution for Sentinel-2 and beyond
UV-PIs: L. Gómez-Chova
PIs: F. Kalaitzis (U. Oxford)
Duration: (2022-2024)
Funding Source: ESA - Towards Explainable AI: Application to Trustworthy Super-Resolution
Partners: U. Oxford
Image of ThinkingEarth - Copernicus Foundation Models for a Thinking EarthThinkingEarth - Copernicus Foundation Models for a Thinking Earth
UV-PIs: Gustau Camps-Valls
Duration: (2024-2027)
Project Type: Research Project
Image of USMILE: Understanding and Modeling the Earth System with Machine LearningUSMILE: Understanding and Modeling the Earth System with Machine Learning
UV-PIs: G. Camps-Valls
PIs: V. Eyring, M. Reichstein, P. Gentine
Duration: (2020-2026)
Funding Source: ERC Synergy Grant
Project Type: Research Project
Image of XAIDA: Extreme Events - Artificial Intelligence for Detection and AttributionXAIDA: Extreme Events - Artificial Intelligence for Detection and Attribution
UV-PIs: Gustau Camps-Valls
Duration: (2021-2025)
Funding Source: EU H2020
Project Type: Research Project

Past projects

Image of Analysis of Water Relations in the Soil-Plant Continuum Using Microwave-Lidar SynergyAnalysis of Water Relations in the Soil-Plant Continuum Using Microwave-Lidar Synergy
UV-PIs: M. Piles
PIs: D. Chaparro, M. Vall-llossera, D. Entekhabi
Duration: (2020-2021)
Funding Source: MIT-Spain-"la Caixa" Foundation Seed Fund
Image of Application of New Visual-Statistical Models for the Restoration of Images Affected by Noise and BlurApplication of New Visual-Statistical Models for the Restoration of Images Affected by Noise and Blur
Duration: (2004-2006)
Funding Source: Spanish Ministry of Defence
Image of Applications in Biomedicine and Human Vision ModelsApplications in Biomedicine and Human Vision Models
Duration: (2004-2005)
Funding Source: Scientific Excellence Groups of the Valencian Community
Image of CIMR Mission Application StudyCIMR Mission Application Study
UV-PIs: M. Piles
PIs: Christopher Merchant
Duration: (2018-2019)
Funding Source: ESA
Image of CIMR Mission Requirements and Performance ConsolidationCIMR Mission Requirements and Performance Consolidation
UV-PIs: M. Piles
PIs: Thomas Lavergne (Norwegian Meteorological Institute)
Duration: (2018-2019)
Funding Source: ESA
Image of Classification of Hyperspectral Remote Sensing Images Based on Semi-Supervised Kernel MethodsClassification of Hyperspectral Remote Sensing Images Based on Semi-Supervised Kernel Methods
Duration: (2006-2008)
Funding Source: Spanish Ministry of Education and Science
Image of Cloud Detection in the CloudCloud Detection in the Cloud
UV-PIs: L. Gomez-Chova
Duration: (2016-2017)
Funding Source: Google Earth Engine Research Award
Image of CLOUDSAT: Machine Learning in Earth Observation: Cloud Screening of Satellite ImagesCLOUDSAT: Machine Learning in Earth Observation: Cloud Screening of Satellite Images
UV-PIs: L. Gómez-Chova
Duration: (2016-2019)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Consolidation of Scientific Baseline for MTG-IRS L2 Processing: Role of Non-Linear Regression MethodsConsolidation of Scientific Baseline for MTG-IRS L2 Processing: Role of Non-Linear Regression Methods
Duration: (2009-2010)
Funding Source: EUMETSAT
Image of Control objetivo de la calidad óptica de lentes de contacto desechables durante su periodo de utilizaciónControl objetivo de la calidad óptica de lentes de contacto desechables durante su periodo de utilización
UV-PIs: J. Malo
Duration: (1995)
Funding Source: Johnson & Johnson Europe
Image of Deep Networks in the Brain: From Measurements to AlgorithmsDeep Networks in the Brain: From Measurements to Algorithms
UV-PIs: J. Malo
Duration: (2018-2021)
Funding Source: Spanish Ministry of Science, Innovation and Universities (MCIU/AEI/FEDER, UE)
Image of Design, Development, and Validation of a Device for the Early Detection of Visual AnomaliesDesign, Development, and Validation of a Device for the Early Detection of Visual Anomalies
Duration: (2001-2003)
Funding Source: CICYT Ministry of Science and Technology
Image of Development of a Local Digital Video Communication System for Residential CommunitiesDevelopment of a Local Digital Video Communication System for Residential Communities
Duration: (2000-2001)
Funding Source: CICYT-FEDER Ministry of Science and Technology
Image of Development of CHRIS/PROBA Modules for the BEAM ToolboxDevelopment of CHRIS/PROBA Modules for the BEAM Toolbox
Duration: (2007-2008)
Funding Source: ESA
Image of Efficient Color Video Compression Using Sequence Analysis Based on Perceptually Significant Image RepresentationsEfficient Color Video Compression Using Sequence Analysis Based on Perceptually Significant Image Representations
Duration: (1999-2001)
Funding Source: CICYT-FEDER Ministry of Science and Technology
Image of EODIX: Advanced Methodologies in Earth Observation: Optical Data Calibration and Information ExtractionEODIX: Advanced Methodologies in Earth Observation: Optical Data Calibration and Information Extraction
Duration: (2009-2011)
Image of ESA Climate Change Initiative (CCI) Phase 1: Essential Climate Variable (ECV) CloudESA Climate Change Initiative (CCI) Phase 1: Essential Climate Variable (ECV) Cloud
Duration: (2010-2013)
Funding Source: ESA
Image of ESA Climate Change Initiative Phase II Soil Moisture (CCI SM 2 Project)ESA Climate Change Initiative Phase II Soil Moisture (CCI SM 2 Project)
PIs: M. Piles (Visiting Scientist), Wouter Dorigo (TU Wien, PI)
Duration: (2017)
Funding Source: ESA
Image of Estimación de parámetros biofísicos para la misión FLEX a partir de Sentinel-3Estimación de parámetros biofísicos para la misión FLEX a partir de Sentinel-3
Duration: (2011-2013)
Funding Source: ESA
Image of FLUXCOM: An initiative to upscale biosphere-atmosphere fluxes from FLUXNET sites to continental and global scalesFLUXCOM: An initiative to upscale biosphere-atmosphere fluxes from FLUXNET sites to continental and global scales
PIs: Martin Jung
Partners: Max Planck Institute for Biogeochemistry
Image of GEOLEARN: Advances in Machine Learning for Large Scale Remote Sensing Data ProcessingGEOLEARN: Advances in Machine Learning for Large Scale Remote Sensing Data Processing
UV-PIs: Jordi Munoz-Mari
Duration: (2015-2018)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of HYPERCLASS: Métodos avanzados para la clasificación de imágenes hiperespectralesHYPERCLASS: Métodos avanzados para la clasificación de imágenes hiperespectrales
Duration: (2005-2007)
Image of Improvement of the current nonlinear regression retrieval (NLR) implemented within the MTGIRS prototype processorImprovement of the current nonlinear regression retrieval (NLR) implemented within the MTGIRS prototype processor
Duration: (2014)
Funding Source: EUMETSAT
Image of Integration of nonlinear perceptual and statistical representations in image restoration and codingIntegration of nonlinear perceptual and statistical representations in image restoration and coding
Duration: (2007-2009)
Funding Source: Spanish Ministry of Science and Technology
Image of Interpolación normalizada de imágenes provenientes de múltiples sensores. Interpolación normalizada de imágenes LANDSAT mediante downscalingInterpolación normalizada de imágenes provenientes de múltiples sensores. Interpolación normalizada de imágenes LANDSAT mediante downscaling
Duration: (2009)
Funding Source: Instituto Geográfico Nacional (IGN)
Image of Knowledge extraction from the Davalor Automated Vision Evaluator (EVA)Knowledge extraction from the Davalor Automated Vision Evaluator (EVA)
UV-PIs: Jesus Malo
Duration: (2015-2017)
Funding Source: Davalor Salud Ltd.
Image of LEAVES: LEArning drivers of Vegetation health from Earth observation SynergiesLEAVES: LEArning drivers of Vegetation health from Earth observation Synergies
UV-PIs: M. Piles and J. Amorós
Duration: (2019-2021)
Funding Source: Spanish Ministry of Science, Innovation and Universities (MCIU/AEI/FEDER, UE)
Image of LIFE-VISION: Learning Image Features to Encode Visual InformationLIFE-VISION: Learning Image Features to Encode Visual Information
Duration: (2012-2015)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Mapping and the citizen sensorMapping and the citizen sensor
Duration: (2013-2016)
Funding Source: ICT COST Action
Image of MERIS/AATSR synergy algorithms for cloud screening, aerosol retrieval, and atmospheric correctionMERIS/AATSR synergy algorithms for cloud screening, aerosol retrieval, and atmospheric correction
Duration: (2008-2009)
Funding Source: European Space Agency (ESA) ESRIN
Image of MIPRCV: Multimodal Interaction in Pattern Recognition and Computer VisionMIPRCV: Multimodal Interaction in Pattern Recognition and Computer Vision
Duration: (2007-2012)
Funding Source: CONSOLIDER-INGENIO
Image of Modelos no gaussianos para la representación de imágenes y secuenciasModelos no gaussianos para la representación de imágenes y secuencias
Duration: (2003)
Funding Source: Universitat de València
Image of Modelos visuales-estadísticos de representación de imágenes y sus aplicacionesModelos visuales-estadísticos de representación de imágenes y sus aplicaciones
Duration: (2003-2006)
Funding Source: Spanish Ministry of Science and Technology
Image of Motion Estimation and Interpretation of Image SequencesMotion Estimation and Interpretation of Image Sequences
Duration: (1996-1998)
Funding Source: CICYT
Image of Natural Image Statistics: Non-parametric Models, Bayesian Models, and Computational Neuroscience for Image ProcessingNatural Image Statistics: Non-parametric Models, Bayesian Models, and Computational Neuroscience for Image Processing
Duration: (2010-2012)
Funding Source: Spanish Ministry of Economy
Image of New Compressive Sensing Algorithms from Natural and Artificial Brain NetworksNew Compressive Sensing Algorithms from Natural and Artificial Brain Networks
UV-PIs: J. Malo (co-IP)
PIs: L. Martinez-Otero (co-IP in Instit. Neurosci. CSIC)
Duration: (2014-2018)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Optimización de los algoritmos de compresión de imágenes biomédicas mediante la utilización de parámetros perceptualesOptimización de los algoritmos de compresión de imágenes biomédicas mediante la utilización de parámetros perceptuales
Duration: (1997-1998)
Funding Source: IVEI Generalitat Valenciana
Image of Procesado de imágenes de sensores de satélite de media resolución y su integración espectro-temporal con sensores de satélite de baja resoluciónProcesado de imágenes de sensores de satélite de media resolución y su integración espectro-temporal con sensores de satélite de baja resolución
Duration: (2010-2011)
Funding Source: Instituto Geográfico Nacional (IGN)
Image of PV-CDRR: Clouds Detection Algorithms for Proba-VPV-CDRR: Clouds Detection Algorithms for Proba-V
PIs: L. Gómez-Chova
Duration: (2016-2017)
Funding Source: ESA IDEAS+ research grant
Image of Quantifying Visual Beauty: Neuroaesthetics and Machine LearningQuantifying Visual Beauty: Neuroaesthetics and Machine Learning
UV-PIs: J. Malo
Duration: (2015-2017)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Red de Excelencia KERMES: Advances in kernel methods for structured dataRed de Excelencia KERMES: Advances in kernel methods for structured data
UV-PIs: G. Camps-Valls (Coordinator)
Duration: (2017-2019)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Red de Excelencia sobre Neurociencia Visual y Ciencias de la ComputaciónRed de Excelencia sobre Neurociencia Visual y Ciencias de la Computación
UV-PIs: J. Malo (Coordinator in Univ. Valencia)
PIs: X. Otazu (Global Coordinator)
Duration: (2016-2018)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of RELEARN: RE-using Field Reference Data in Space and Time for Vegetation MappingRELEARN: RE-using Field Reference Data in Space and Time for Vegetation Mapping
Duration: (2011-2013)
Funding Source: BELSPO
Image of SCALE: Causal Inference in the Human-Biosphere Coupled SystemSCALE: Causal Inference in the Human-Biosphere Coupled System
PIs: G. Camps-Valls
Duration: (2020-2022)
Funding Source: Fundación BBVA
Image of SEDAL: Statistical Learning for Earth Observation Data AnalysisSEDAL: Statistical Learning for Earth Observation Data Analysis
UV-PIs: G. Camps-Valls
Duration: (2015-2020)
Image of SenSyF: Sentinels Synergy FrameworkSenSyF: Sentinels Synergy Framework
Duration: (2013-2015)
Funding Source: EU 7th Framework Programme
Image of Spanish Network for the Advancement and Transfer of Applied Computational Intelligence (ATICA)Spanish Network for the Advancement and Transfer of Applied Computational Intelligence (ATICA)
UV-PIs: J. Malo
PIs: X. Otazu (Global Coordinator)
Duration: (2016-2018)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Study on Pattern Recognition Based Cloud Detection Over LandmarksStudy on Pattern Recognition Based Cloud Detection Over Landmarks
Duration: (2015)
Funding Source: EUMETSAT
Image of Support Vector Machines and Computational Human Vision Models for Image Coding and DenoisingSupport Vector Machines and Computational Human Vision Models for Image Coding and Denoising
Duration: (2006-2008)
Funding Source: Generalitat Valenciana
Image of VLC-BioMedic/BioClinic Agreements for the Use of fMRI Scanner FacilitiesVLC-BioMedic/BioClinic Agreements for the Use of fMRI Scanner Facilities
UV-PIs: J. Malo
PIs: A. Alberich (La Fe), C. Montoliu (Clinic)
Duration: (2015-2017)
Funding Source: Generalitat Valenciana
\ No newline at end of file +

Current projects

Image of AI for complex systems: Brain, Earth, Climate, SocietyAI for complex systems: Brain, Earth, Climate, Society
UV-PIs: G. Camps-Valls, M. Piles
Duration: (2022-2026)
Funding Source: Generalitat Valenciana - PROMETEO programme
Image of AI4PEX: Artificial Intelligence for enhanced representation of processes and extremes in Earth System ModelsAI4PEX: Artificial Intelligence for enhanced representation of processes and extremes in Earth System Models
UV-PIs: Gustau Camps-Valls, Maria Piles
Duration: (2024-2027)
Image of Causal Inference to Understand the Impact of Humanitarian Interventions on Food Security in AfricaCausal Inference to Understand the Impact of Humanitarian Interventions on Food Security in Africa
UV-PIs: Gustau Camps-Valls, Gherardo Varando
PIs: Jose Maria Tarraga
Duration: (2022-2024)
Funding Source: Microsoft Research - Microsoft Climate Research Initiative
Image of CIMR-DEVALGO: CIMR Level-2 Algorithm DevelopmentCIMR-DEVALGO: CIMR Level-2 Algorithm Development
UV-PIs: M. Piles
PIs: Thomas Lavergne (Norwegian Meteorological Institute)
Duration: (2023-2024)
Funding Source: ESA ITT
Partners: Norwegian Meteorological Institute
Image of DEEPCLOUD: Deep learning tools for operational cloud detection in Earth observation satellite images.DEEPCLOUD: Deep learning tools for operational cloud detection in Earth observation satellite images.
UV-PIs: L. Gomez-Chova
Duration: (2020-2024)
Funding Source: Spanish Ministry of Science, Innovation and Universities (MCIU/AEI/FEDER, UE), PID2019-109026RB-I00
Image of DeepCube: Explainable AI pipelines for big Copernicus dataDeepCube: Explainable AI pipelines for big Copernicus data
UV-PIs: Gustau Camps-Valls, M. Piles
Duration: (2021-2024)
Funding Source: EU H2020
Project Type: Research Project
Image of DeepExtremes: Multi-Hazards, Compounds and Cascade eventsDeepExtremes: Multi-Hazards, Compounds and Cascade events
UV-PIs: G. Camps-Valls
Duration: (2022-)
Funding Source: AI for Science 2021
Image of ELIAS: European Lighthouse of AI for SustainabilityELIAS: European Lighthouse of AI for Sustainability
UV-PIs: Gustau Camps-Valls
Duration: (2023-2026)
Project Type: Research Project
Image of ELISE: European Learning And Intelligent Systems ExcellenceELISE: European Learning And Intelligent Systems Excellence
UV-PIs: Gustau Camps-Valls
Duration: (2020-2023)
Funding Source: ICT-48
Project Type: Research Project
Image of ELLIS - European Laboratory for Learning and Intelligent SystemsELLIS - European Laboratory for Learning and Intelligent Systems
UV-PIs: Gustau Camps-Valls
PIs: Markus Reichstein
Duration: (2019-)
Role: Universitat de València co-coordinates the research program "Machine Learning for the Earth and Climate Sciences"
Project Type: Research Program
Image of HERMES: Hybrid Estimation and Remote Sensing Monitoring of Evaporation and Soil MoistureHERMES: Hybrid Estimation and Remote Sensing Monitoring of Evaporation and Soil Moisture
UV-PIs: Gustau Camps-Valls, Alvaro Moreno
PIs: Diego Miralles (Uni Ghent), Miguel Mahecha (Uni Leipzig)
Duration: (2023-2024)
Funding Source: BELSPO Stereo IV Research program
Partners: Uni Ghent, Uni Leipzig
Image of i-AIDA: International AI Doctoral Academyi-AIDA: International AI Doctoral Academy
UV-PIs: G. Camps-Valls
Duration: (2022-)
Funding Source: ICT-48
Image of iMIRACLI: innovative MachIne leaRning to constrain Aerosol-cloud CLimate ImpactsiMIRACLI: innovative MachIne leaRning to constrain Aerosol-cloud CLimate Impacts
PIs: G. Camps-Valls
Duration: (2020-2023)
Funding Source: ETN Marie Curie Training Network
Project Type: Training Network
Image of MALOC: MAchine Learning for assessing Ocean ClimateMALOC: MAchine Learning for assessing Ocean Climate
UV-PIs: V. Nieves
Duration: (2020-2024)
Funding Source: Generalitat Valenciana - Plan GenT (CIDEGENT/2019/055)
Image of MALOPH: A novel MAchine Learning based perspective to identify and model Ocean Precursors to extreme Hurricane developmentMALOPH: A novel MAchine Learning based perspective to identify and model Ocean Precursors to extreme Hurricane development
UV-PIs: V. Nieves
Duration: (2021-2024)
Funding Source: European Space Agency - Open Discovery Ideas Channel
Image of MediTwin: Mediterranean Digital Twin Network for Understanding Climate ExtremesMediTwin: Mediterranean Digital Twin Network for Understanding Climate Extremes
UV-PIs: Gustau Camps-Valls
Duration: (2024-2027)
Image of OpenSR — Robust, accountable super-resolution for Sentinel-2 and beyondOpenSR — Robust, accountable super-resolution for Sentinel-2 and beyond
UV-PIs: L. Gómez-Chova
PIs: F. Kalaitzis (U. Oxford)
Duration: (2022-2024)
Funding Source: ESA - Towards Explainable AI: Application to Trustworthy Super-Resolution
Partners: U. Oxford
Image of ThinkingEarth - Copernicus Foundation Models for a Thinking EarthThinkingEarth - Copernicus Foundation Models for a Thinking Earth
UV-PIs: Gustau Camps-Valls
Duration: (2024-2027)
Project Type: Research Project
Image of USMILE: Understanding and Modeling the Earth System with Machine LearningUSMILE: Understanding and Modeling the Earth System with Machine Learning
UV-PIs: G. Camps-Valls
PIs: V. Eyring, M. Reichstein, P. Gentine
Duration: (2020-2026)
Funding Source: ERC Synergy Grant
Project Type: Research Project
Image of XAIDA: Extreme Events - Artificial Intelligence for Detection and AttributionXAIDA: Extreme Events - Artificial Intelligence for Detection and Attribution
UV-PIs: Gustau Camps-Valls
Duration: (2021-2025)
Funding Source: EU H2020
Project Type: Research Project

Past projects

Image of Analysis of Water Relations in the Soil-Plant Continuum Using Microwave-Lidar SynergyAnalysis of Water Relations in the Soil-Plant Continuum Using Microwave-Lidar Synergy
UV-PIs: M. Piles
PIs: D. Chaparro, M. Vall-llossera, D. Entekhabi
Duration: (2020-2021)
Funding Source: MIT-Spain-"la Caixa" Foundation Seed Fund
Image of Application of New Visual-Statistical Models for the Restoration of Images Affected by Noise and BlurApplication of New Visual-Statistical Models for the Restoration of Images Affected by Noise and Blur
Duration: (2004-2006)
Funding Source: Spanish Ministry of Defence
Image of Applications in Biomedicine and Human Vision ModelsApplications in Biomedicine and Human Vision Models
Duration: (2004-2005)
Funding Source: Scientific Excellence Groups of the Valencian Community
Image of CIMR Mission Application StudyCIMR Mission Application Study
UV-PIs: M. Piles
PIs: Christopher Merchant
Duration: (2018-2019)
Funding Source: ESA
Image of CIMR Mission Requirements and Performance ConsolidationCIMR Mission Requirements and Performance Consolidation
UV-PIs: M. Piles
PIs: Thomas Lavergne (Norwegian Meteorological Institute)
Duration: (2018-2019)
Funding Source: ESA
Image of Classification of Hyperspectral Remote Sensing Images Based on Semi-Supervised Kernel MethodsClassification of Hyperspectral Remote Sensing Images Based on Semi-Supervised Kernel Methods
Duration: (2006-2008)
Funding Source: Spanish Ministry of Education and Science
Image of Cloud Detection in the CloudCloud Detection in the Cloud
UV-PIs: L. Gomez-Chova
Duration: (2016-2017)
Funding Source: Google Earth Engine Research Award
Image of CLOUDSAT: Machine Learning in Earth Observation: Cloud Screening of Satellite ImagesCLOUDSAT: Machine Learning in Earth Observation: Cloud Screening of Satellite Images
UV-PIs: L. Gómez-Chova
Duration: (2016-2019)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Consolidation of Scientific Baseline for MTG-IRS L2 Processing: Role of Non-Linear Regression MethodsConsolidation of Scientific Baseline for MTG-IRS L2 Processing: Role of Non-Linear Regression Methods
Duration: (2009-2010)
Funding Source: EUMETSAT
Image of Control objetivo de la calidad óptica de lentes de contacto desechables durante su periodo de utilizaciónControl objetivo de la calidad óptica de lentes de contacto desechables durante su periodo de utilización
UV-PIs: J. Malo
Duration: (1995)
Funding Source: Johnson & Johnson Europe
Image of Deep Networks in the Brain: From Measurements to AlgorithmsDeep Networks in the Brain: From Measurements to Algorithms
UV-PIs: J. Malo
Duration: (2018-2021)
Funding Source: Spanish Ministry of Science, Innovation and Universities (MCIU/AEI/FEDER, UE)
Image of Design, Development, and Validation of a Device for the Early Detection of Visual AnomaliesDesign, Development, and Validation of a Device for the Early Detection of Visual Anomalies
Duration: (2001-2003)
Funding Source: CICYT Ministry of Science and Technology
Image of Development of a Local Digital Video Communication System for Residential CommunitiesDevelopment of a Local Digital Video Communication System for Residential Communities
Duration: (2000-2001)
Funding Source: CICYT-FEDER Ministry of Science and Technology
Image of Development of CHRIS/PROBA Modules for the BEAM ToolboxDevelopment of CHRIS/PROBA Modules for the BEAM Toolbox
Duration: (2007-2008)
Funding Source: ESA
Image of Efficient Color Video Compression Using Sequence Analysis Based on Perceptually Significant Image RepresentationsEfficient Color Video Compression Using Sequence Analysis Based on Perceptually Significant Image Representations
Duration: (1999-2001)
Funding Source: CICYT-FEDER Ministry of Science and Technology
Image of EODIX: Advanced Methodologies in Earth Observation: Optical Data Calibration and Information ExtractionEODIX: Advanced Methodologies in Earth Observation: Optical Data Calibration and Information Extraction
Duration: (2009-2011)
Image of ESA Climate Change Initiative (CCI) Phase 1: Essential Climate Variable (ECV) CloudESA Climate Change Initiative (CCI) Phase 1: Essential Climate Variable (ECV) Cloud
Duration: (2010-2013)
Funding Source: ESA
Image of ESA Climate Change Initiative Phase II Soil Moisture (CCI SM 2 Project)ESA Climate Change Initiative Phase II Soil Moisture (CCI SM 2 Project)
PIs: M. Piles (Visiting Scientist), Wouter Dorigo (TU Wien, PI)
Duration: (2017)
Funding Source: ESA
Image of Estimación de parámetros biofísicos para la misión FLEX a partir de Sentinel-3Estimación de parámetros biofísicos para la misión FLEX a partir de Sentinel-3
Duration: (2011-2013)
Funding Source: ESA
Image of FLUXCOM: An initiative to upscale biosphere-atmosphere fluxes from FLUXNET sites to continental and global scalesFLUXCOM: An initiative to upscale biosphere-atmosphere fluxes from FLUXNET sites to continental and global scales
PIs: Martin Jung
Partners: Max Planck Institute for Biogeochemistry
Image of GEOLEARN: Advances in Machine Learning for Large Scale Remote Sensing Data ProcessingGEOLEARN: Advances in Machine Learning for Large Scale Remote Sensing Data Processing
UV-PIs: Jordi Munoz-Mari
Duration: (2015-2018)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of HYPERCLASS: Métodos avanzados para la clasificación de imágenes hiperespectralesHYPERCLASS: Métodos avanzados para la clasificación de imágenes hiperespectrales
Duration: (2005-2007)
Image of Improvement of the current nonlinear regression retrieval (NLR) implemented within the MTGIRS prototype processorImprovement of the current nonlinear regression retrieval (NLR) implemented within the MTGIRS prototype processor
Duration: (2014)
Funding Source: EUMETSAT
Image of Integration of nonlinear perceptual and statistical representations in image restoration and codingIntegration of nonlinear perceptual and statistical representations in image restoration and coding
Duration: (2007-2009)
Funding Source: Spanish Ministry of Science and Technology
Image of Interpolación normalizada de imágenes provenientes de múltiples sensores. Interpolación normalizada de imágenes LANDSAT mediante downscalingInterpolación normalizada de imágenes provenientes de múltiples sensores. Interpolación normalizada de imágenes LANDSAT mediante downscaling
Duration: (2009)
Funding Source: Instituto Geográfico Nacional (IGN)
Image of Knowledge extraction from the Davalor Automated Vision Evaluator (EVA)Knowledge extraction from the Davalor Automated Vision Evaluator (EVA)
UV-PIs: Jesus Malo
Duration: (2015-2017)
Funding Source: Davalor Salud Ltd.
Image of LEAVES: LEArning drivers of Vegetation health from Earth observation SynergiesLEAVES: LEArning drivers of Vegetation health from Earth observation Synergies
UV-PIs: M. Piles and J. Amorós
Duration: (2019-2021)
Funding Source: Spanish Ministry of Science, Innovation and Universities (MCIU/AEI/FEDER, UE)
Image of LIFE-VISION: Learning Image Features to Encode Visual InformationLIFE-VISION: Learning Image Features to Encode Visual Information
Duration: (2012-2015)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Mapping and the citizen sensorMapping and the citizen sensor
Duration: (2013-2016)
Funding Source: ICT COST Action
Image of MERIS/AATSR synergy algorithms for cloud screening, aerosol retrieval, and atmospheric correctionMERIS/AATSR synergy algorithms for cloud screening, aerosol retrieval, and atmospheric correction
Duration: (2008-2009)
Funding Source: European Space Agency (ESA) ESRIN
Image of MIPRCV: Multimodal Interaction in Pattern Recognition and Computer VisionMIPRCV: Multimodal Interaction in Pattern Recognition and Computer Vision
Duration: (2007-2012)
Funding Source: CONSOLIDER-INGENIO
Image of Modelos no gaussianos para la representación de imágenes y secuenciasModelos no gaussianos para la representación de imágenes y secuencias
Duration: (2003)
Funding Source: Universitat de València
Image of Modelos visuales-estadísticos de representación de imágenes y sus aplicacionesModelos visuales-estadísticos de representación de imágenes y sus aplicaciones
Duration: (2003-2006)
Funding Source: Spanish Ministry of Science and Technology
Image of Motion Estimation and Interpretation of Image SequencesMotion Estimation and Interpretation of Image Sequences
Duration: (1996-1998)
Funding Source: CICYT
Image of Natural Image Statistics: Non-parametric Models, Bayesian Models, and Computational Neuroscience for Image ProcessingNatural Image Statistics: Non-parametric Models, Bayesian Models, and Computational Neuroscience for Image Processing
Duration: (2010-2012)
Funding Source: Spanish Ministry of Economy
Image of New Compressive Sensing Algorithms from Natural and Artificial Brain NetworksNew Compressive Sensing Algorithms from Natural and Artificial Brain Networks
UV-PIs: J. Malo (co-IP)
PIs: L. Martinez-Otero (co-IP in Instit. Neurosci. CSIC)
Duration: (2014-2018)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Optimización de los algoritmos de compresión de imágenes biomédicas mediante la utilización de parámetros perceptualesOptimización de los algoritmos de compresión de imágenes biomédicas mediante la utilización de parámetros perceptuales
Duration: (1997-1998)
Funding Source: IVEI Generalitat Valenciana
Image of Procesado de imágenes de sensores de satélite de media resolución y su integración espectro-temporal con sensores de satélite de baja resoluciónProcesado de imágenes de sensores de satélite de media resolución y su integración espectro-temporal con sensores de satélite de baja resolución
Duration: (2010-2011)
Funding Source: Instituto Geográfico Nacional (IGN)
Image of PV-CDRR: Clouds Detection Algorithms for Proba-VPV-CDRR: Clouds Detection Algorithms for Proba-V
PIs: L. Gómez-Chova
Duration: (2016-2017)
Funding Source: ESA IDEAS+ research grant
Image of Quantifying Visual Beauty: Neuroaesthetics and Machine LearningQuantifying Visual Beauty: Neuroaesthetics and Machine Learning
UV-PIs: J. Malo
Duration: (2015-2017)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Red de Excelencia KERMES: Advances in kernel methods for structured dataRed de Excelencia KERMES: Advances in kernel methods for structured data
UV-PIs: G. Camps-Valls (Coordinator)
Duration: (2017-2019)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Red de Excelencia sobre Neurociencia Visual y Ciencias de la ComputaciónRed de Excelencia sobre Neurociencia Visual y Ciencias de la Computación
UV-PIs: J. Malo (Coordinator in Univ. Valencia)
PIs: X. Otazu (Global Coordinator)
Duration: (2016-2018)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of RELEARN: RE-using Field Reference Data in Space and Time for Vegetation MappingRELEARN: RE-using Field Reference Data in Space and Time for Vegetation Mapping
Duration: (2011-2013)
Funding Source: BELSPO
Image of SCALE: Causal Inference in the Human-Biosphere Coupled SystemSCALE: Causal Inference in the Human-Biosphere Coupled System
PIs: G. Camps-Valls
Duration: (2020-2022)
Funding Source: Fundación BBVA
Image of SEDAL: Statistical Learning for Earth Observation Data AnalysisSEDAL: Statistical Learning for Earth Observation Data Analysis
UV-PIs: G. Camps-Valls
Duration: (2015-2020)
Image of SenSyF: Sentinels Synergy FrameworkSenSyF: Sentinels Synergy Framework
Duration: (2013-2015)
Funding Source: EU 7th Framework Programme
Image of Spanish Network for the Advancement and Transfer of Applied Computational Intelligence (ATICA)Spanish Network for the Advancement and Transfer of Applied Computational Intelligence (ATICA)
UV-PIs: J. Malo
PIs: X. Otazu (Global Coordinator)
Duration: (2016-2018)
Funding Source: Spanish Ministry of Economy and Competitiveness
Image of Study on Pattern Recognition Based Cloud Detection Over LandmarksStudy on Pattern Recognition Based Cloud Detection Over Landmarks
Duration: (2015)
Funding Source: EUMETSAT
Image of Support Vector Machines and Computational Human Vision Models for Image Coding and DenoisingSupport Vector Machines and Computational Human Vision Models for Image Coding and Denoising
Duration: (2006-2008)
Funding Source: Generalitat Valenciana
Image of VLC-BioMedic/BioClinic Agreements for the Use of fMRI Scanner FacilitiesVLC-BioMedic/BioClinic Agreements for the Use of fMRI Scanner Facilities
UV-PIs: J. Malo
PIs: A. Alberich (La Fe), C. Montoliu (Clinic)
Duration: (2015-2017)
Funding Source: Generalitat Valenciana
\ No newline at end of file diff --git a/projects/index.xml b/projects/index.xml index 547779e6..f903610a 100644 --- a/projects/index.xml +++ b/projects/index.xml @@ -1 +1 @@ -projects on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects/Recent content in projects on ISP - Image and Signal Processing groupHugoen-usAI for complex systems: Brain, Earth, Climate, Societyhttps://ipl-uv.github.io/projects/current/ai_complex/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/ai_complex/AI4PEX: Artificial Intelligence for enhanced representation of processes and extremes in Earth System Modelshttps://ipl-uv.github.io/projects/current/ai4pex/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/ai4pex/Analysis of Water Relations in the Soil-Plant Continuum Using Microwave-Lidar Synergyhttps://ipl-uv.github.io/projects/past/microwave-lidar-synergy/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/microwave-lidar-synergy/Application of New Visual-Statistical Models for the Restoration of Images Affected by Noise and Blurhttps://ipl-uv.github.io/projects/past/visual-models-restoration/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/visual-models-restoration/Applications in Biomedicine and Human Vision Modelshttps://ipl-uv.github.io/projects/past/biomedicine-vision-models/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/biomedicine-vision-models/Causal Inference to Understand the Impact of Humanitarian Interventions on Food Security in Africahttps://ipl-uv.github.io/projects/current/causal_inference/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/causal_inference/CIMR Mission Application Studyhttps://ipl-uv.github.io/projects/past/cimr-application/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/cimr-application/CIMR Mission Requirements and Performance 
Consolidationhttps://ipl-uv.github.io/projects/past/cimr-requirements/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/cimr-requirements/CIMR-DEVALGO: CIMR Level-2 Algorithm Developmenthttps://ipl-uv.github.io/projects/current/cimr/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/cimr/Classification of Hyperspectral Remote Sensing Images Based on Semi-Supervised Kernel Methodshttps://ipl-uv.github.io/projects/past/classification-of-hyperspectral-remote-sensing-images/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/classification-of-hyperspectral-remote-sensing-images/Cloud Detection in the Cloudhttps://ipl-uv.github.io/projects/past/cloud-detection-in-the-cloud/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/cloud-detection-in-the-cloud/CLOUDSAT: Machine Learning in Earth Observation: Cloud Screening of Satellite Imageshttps://ipl-uv.github.io/projects/past/cloudsat/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/cloudsat/Consolidation of Scientific Baseline for MTG-IRS L2 Processing: Role of Non-Linear Regression Methodshttps://ipl-uv.github.io/projects/past/consolidation-of-scientific-baseline/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/consolidation-of-scientific-baseline/Control objetivo de la calidad óptica de lentes de contacto desechables durante su periodo de utilizaciónhttps://ipl-uv.github.io/projects/past/control-objetivo-de-la-calidad-optica/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/control-objetivo-de-la-calidad-optica/Deep Networks in the Brain: From Measurements to Algorithmshttps://ipl-uv.github.io/projects/past/deep-networks-brain/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/deep-networks-brain/DEEPCLOUD: Deep learning tools for operational cloud detection in Earth observation satellite images.https://ipl-uv.github.io/projects/current/deep_cloud/Mon, 01 Jan 0001 
00:00:00 +0000https://ipl-uv.github.io/projects/current/deep_cloud/DeepCube: Explainable AI pipelines for big Copernicus datahttps://ipl-uv.github.io/projects/current/deepcube/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/deepcube/DeepExtremes: Multi-Hazards, Compounds and Cascade eventshttps://ipl-uv.github.io/projects/current/deep_extremes/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/deep_extremes/Design, Development, and Validation of a Device for the Early Detection of Visual Anomalieshttps://ipl-uv.github.io/projects/past/diseno-desarrollo-y-validacion-de-un-dispositivo/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/diseno-desarrollo-y-validacion-de-un-dispositivo/Development of a Local Digital Video Communication System for Residential Communitieshttps://ipl-uv.github.io/projects/past/desarrollo-de-un-sistema-digital-local/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/desarrollo-de-un-sistema-digital-local/Development of CHRIS/PROBA Modules for the BEAM Toolboxhttps://ipl-uv.github.io/projects/past/development-of-chris-proba-modules/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/development-of-chris-proba-modules/Efficient Color Video Compression Using Sequence Analysis Based on Perceptually Significant Image Representationshttps://ipl-uv.github.io/projects/past/compresion-eficiente-de-video/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/compresion-eficiente-de-video/ELIAS: European Lighthouse of AI for Sustainabilityhttps://ipl-uv.github.io/projects/current/elias/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/elias/ELISE: European Learning And Intelligent Systems Excellencehttps://ipl-uv.github.io/projects/current/elise/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/elise/ELLIS - European Laboratory for Learning and Intelligent 
Systemshttps://ipl-uv.github.io/projects/current/ellis/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/ellis/EODIX: Advanced Methodologies in Earth Observation: Optical Data Calibration and Information Extractionhttps://ipl-uv.github.io/projects/past/eodix-metodologias-avanzadas/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/eodix-metodologias-avanzadas/ESA Climate Change Initiative (CCI) Phase 1: Essential Climate Variable (ECV) Cloudhttps://ipl-uv.github.io/projects/past/esa-climate-change-initiative/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/esa-climate-change-initiative/ESA Climate Change Initiative Phase II Soil Moisture (CCI SM 2 Project)https://ipl-uv.github.io/projects/past/cci-sm2/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/cci-sm2/Estimación de parámetros biofísicos para la misión FLEX a partir de Sentinel-3https://ipl-uv.github.io/projects/past/estimacion-de-parametros-biofisicos/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/estimacion-de-parametros-biofisicos/FLUXCOM: An initiative to upscale biosphere-atmosphere fluxes from FLUXNET sites to continental and global scaleshttps://ipl-uv.github.io/projects/past/fluxcom/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/fluxcom/GEOLEARN: Advances in Machine Learning for Large Scale Remote Sensing Data Processinghttps://ipl-uv.github.io/projects/past/geolearn/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/geolearn/HERMES: Hybrid Estimation and Remote Sensing Monitoring of Evaporation and Soil Moisturehttps://ipl-uv.github.io/projects/current/hermes/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/hermes/HYPERCLASS: Métodos avanzados para la clasificación de imágenes hiperespectraleshttps://ipl-uv.github.io/projects/past/hyperclass-metodos-avanzados/Mon, 01 Jan 0001 00:00:00 
+0000https://ipl-uv.github.io/projects/past/hyperclass-metodos-avanzados/i-AIDA: International AI Doctoral Academyhttps://ipl-uv.github.io/projects/current/i_aida/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/i_aida/iMIRACLI: innovative MachIne leaRning to constrain Aerosol-cloud CLimate Impactshttps://ipl-uv.github.io/projects/current/imiracli/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/imiracli/Improvement of the current nonlinear regression retrieval (NLR) implemented within the MTGIRS prototype processorhttps://ipl-uv.github.io/projects/past/improvement-of-the-current-nonlinear-regression-retrieval/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/improvement-of-the-current-nonlinear-regression-retrieval/Integration of nonlinear perceptual and statistical representations in image restoration and codinghttps://ipl-uv.github.io/projects/past/integration-of-nonlinear-perceptual/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/integration-of-nonlinear-perceptual/Interpolación normalizada de imágenes provenientes de múltiples sensores. 
Interpolación normalizada de imágenes LANDSAT mediante downscalinghttps://ipl-uv.github.io/projects/past/interpolacion-normalizada-de-imagenes/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/interpolacion-normalizada-de-imagenes/Knowledge extraction from the Davalor Automated Vision Evaluator (EVA)https://ipl-uv.github.io/projects/past/knowledge-extraction-from-davalor-eva/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/knowledge-extraction-from-davalor-eva/LEAVES: LEArning drivers of Vegetation health from Earth observation Synergieshttps://ipl-uv.github.io/projects/past/leaves/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/leaves/LIFE-VISION: Learning Image Features to Encode Visual Informationhttps://ipl-uv.github.io/projects/past/life-vision/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/life-vision/MALOC: MAchine Learning for assessing Ocean Climatehttps://ipl-uv.github.io/projects/current/maloc/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/maloc/MALOPH: A novel MAchine Learning based perspective to identify and model Ocean Precursors to extreme Hurricane developmenthttps://ipl-uv.github.io/projects/current/maloph/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/maloph/Mapping and the citizen sensorhttps://ipl-uv.github.io/projects/past/mapping-and-the-citizen-sensor/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/mapping-and-the-citizen-sensor/MediTwin: Mediterranean Digital Twin Network for Understanding Climate Extremeshttps://ipl-uv.github.io/projects/current/meditwin/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/meditwin/MERIS/AATSR synergy algorithms for cloud screening, aerosol retrieval, and atmospheric correctionhttps://ipl-uv.github.io/projects/past/meris-aatsr-synergy-algorithms/Mon, 01 Jan 0001 00:00:00 
+0000https://ipl-uv.github.io/projects/past/meris-aatsr-synergy-algorithms/MIPRCV: Multimodal Interaction in Pattern Recognition and Computer Visionhttps://ipl-uv.github.io/projects/past/miprcv-multimodal-interaction/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/miprcv-multimodal-interaction/Modelos no gaussianos para la representación de imágenes y secuenciashttps://ipl-uv.github.io/projects/past/modelos-no-gaussianos/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/modelos-no-gaussianos/Modelos visuales-estadísticos de representación de imágenes y sus aplicacioneshttps://ipl-uv.github.io/projects/past/modelos-visuales-estadisticos-de-representacion-de-imagenes/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/modelos-visuales-estadisticos-de-representacion-de-imagenes/Motion Estimation and Interpretation of Image Sequenceshttps://ipl-uv.github.io/projects/past/estimacion-de-movimiento-e-interpretacion/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/estimacion-de-movimiento-e-interpretacion/Natural Image Statistics: Non-parametric Models, Bayesian Models, and Computational Neuroscience for Image Processinghttps://ipl-uv.github.io/projects/past/natural-image-statistics/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/natural-image-statistics/New Compressive Sensing Algorithms from Natural and Artificial Brain Networkshttps://ipl-uv.github.io/projects/past/compressive-sensing-algorithms/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/compressive-sensing-algorithms/OpenSR — Robust, accountable super-resolution for Sentinel-2 and beyondhttps://ipl-uv.github.io/projects/current/opensr/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/opensr/Optimización de los algoritmos de compresión de imágenes biomédicas mediante la utilización de parámetros 
perceptualeshttps://ipl-uv.github.io/projects/past/optimizacion-de-los-algoritmos-de-compresion/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/optimizacion-de-los-algoritmos-de-compresion/Procesado de imágenes de sensores de satélite de media resolución y su integración espectro-temporal con sensores de satélite de baja resoluciónhttps://ipl-uv.github.io/projects/past/procesado-de-imagenes-de-sensores/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/procesado-de-imagenes-de-sensores/PV-CDRR: Clouds Detection Algorithms for Proba-Vhttps://ipl-uv.github.io/projects/past/pv-cdrr/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/pv-cdrr/Quantifying Visual Beauty: Neuroaesthetics and Machine Learninghttps://ipl-uv.github.io/projects/past/quantifying-visual-beauty/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/quantifying-visual-beauty/Red de Excelencia KERMES: Advances in kernel methods for structured datahttps://ipl-uv.github.io/projects/past/kermes/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/kermes/Red de Excelencia sobre Neurociencia Visual y Ciencias de la Computaciónhttps://ipl-uv.github.io/projects/past/neurociencia-visual-ciencias-computacion/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/neurociencia-visual-ciencias-computacion/RELEARN: RE-using Field Reference Data in Space and Time for Vegetation Mappinghttps://ipl-uv.github.io/projects/past/relearn/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/relearn/SCALE: Causal Inference in the Human-Biosphere Coupled Systemhttps://ipl-uv.github.io/projects/past/scale/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/scale/SEDAL: Statistical Learning for Earth Observation Data Analysishttps://ipl-uv.github.io/projects/past/sedal/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/sedal/SenSyF: Sentinels Synergy 
Frameworkhttps://ipl-uv.github.io/projects/past/sensyf/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/sensyf/Spanish Network for the Advancement and Transfer of Applied Computational Intelligence (ATICA)https://ipl-uv.github.io/projects/past/atica/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/atica/Study on Pattern Recognition Based Cloud Detection Over Landmarkshttps://ipl-uv.github.io/projects/past/pattern-recognition-cloud-detection/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/pattern-recognition-cloud-detection/Support Vector Machines and Computational Human Vision Models for Image Coding and Denoisinghttps://ipl-uv.github.io/projects/past/support-vector-machines-and-computational-human-vision-models/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/support-vector-machines-and-computational-human-vision-models/ThinkingEarth - Copernicus Foundation Models for a Thinking Earthhttps://ipl-uv.github.io/projects/current/thinkingearth/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/thinkingearth/USMILE: Understanding and Modeling the Earth System with Machine Learninghttps://ipl-uv.github.io/projects/current/usmile/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/usmile/VLC-BioMedic/BioClinic Agreements for the Use of fMRI Scanner Facilitieshttps://ipl-uv.github.io/projects/past/vlc-biomedic-bioclinic/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/past/vlc-biomedic-bioclinic/XAIDA: Extreme Events - Artificial Intelligence for Detection and Attributionhttps://ipl-uv.github.io/projects/current/xaida/Mon, 01 Jan 0001 00:00:00 +0000https://ipl-uv.github.io/projects/current/xaida/ \ No newline at end of file +projects on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects/Recent content in projects on ISP - Image and Signal Processing groupHugoen-usAI for complex systems: Brain, Earth, Climate, 
Societyhttps://isp.uv.es/github/projects/current/ai_complex/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/ai_complex/AI4PEX: Artificial Intelligence for enhanced representation of processes and extremes in Earth System Modelshttps://isp.uv.es/github/projects/current/ai4pex/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/ai4pex/Analysis of Water Relations in the Soil-Plant Continuum Using Microwave-Lidar Synergyhttps://isp.uv.es/github/projects/past/microwave-lidar-synergy/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/microwave-lidar-synergy/Application of New Visual-Statistical Models for the Restoration of Images Affected by Noise and Blurhttps://isp.uv.es/github/projects/past/visual-models-restoration/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/visual-models-restoration/Applications in Biomedicine and Human Vision Modelshttps://isp.uv.es/github/projects/past/biomedicine-vision-models/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/biomedicine-vision-models/Causal Inference to Understand the Impact of Humanitarian Interventions on Food Security in Africahttps://isp.uv.es/github/projects/current/causal_inference/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/causal_inference/CIMR Mission Application Studyhttps://isp.uv.es/github/projects/past/cimr-application/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/cimr-application/CIMR Mission Requirements and Performance Consolidationhttps://isp.uv.es/github/projects/past/cimr-requirements/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/cimr-requirements/CIMR-DEVALGO: CIMR Level-2 Algorithm Developmenthttps://isp.uv.es/github/projects/current/cimr/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/cimr/Classification of Hyperspectral Remote Sensing Images Based on Semi-Supervised Kernel 
Methodshttps://isp.uv.es/github/projects/past/classification-of-hyperspectral-remote-sensing-images/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/classification-of-hyperspectral-remote-sensing-images/Cloud Detection in the Cloudhttps://isp.uv.es/github/projects/past/cloud-detection-in-the-cloud/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/cloud-detection-in-the-cloud/CLOUDSAT: Machine Learning in Earth Observation: Cloud Screening of Satellite Imageshttps://isp.uv.es/github/projects/past/cloudsat/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/cloudsat/Consolidation of Scientific Baseline for MTG-IRS L2 Processing: Role of Non-Linear Regression Methodshttps://isp.uv.es/github/projects/past/consolidation-of-scientific-baseline/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/consolidation-of-scientific-baseline/Control objetivo de la calidad óptica de lentes de contacto desechables durante su periodo de utilizaciónhttps://isp.uv.es/github/projects/past/control-objetivo-de-la-calidad-optica/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/control-objetivo-de-la-calidad-optica/Deep Networks in the Brain: From Measurements to Algorithmshttps://isp.uv.es/github/projects/past/deep-networks-brain/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/deep-networks-brain/DEEPCLOUD: Deep learning tools for operational cloud detection in Earth observation satellite images.https://isp.uv.es/github/projects/current/deep_cloud/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/deep_cloud/DeepCube: Explainable AI pipelines for big Copernicus datahttps://isp.uv.es/github/projects/current/deepcube/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/deepcube/DeepExtremes: Multi-Hazards, Compounds and Cascade eventshttps://isp.uv.es/github/projects/current/deep_extremes/Mon, 01 Jan 0001 00:00:00 
+0000https://isp.uv.es/github/projects/current/deep_extremes/Design, Development, and Validation of a Device for the Early Detection of Visual Anomalieshttps://isp.uv.es/github/projects/past/diseno-desarrollo-y-validacion-de-un-dispositivo/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/diseno-desarrollo-y-validacion-de-un-dispositivo/Development of a Local Digital Video Communication System for Residential Communitieshttps://isp.uv.es/github/projects/past/desarrollo-de-un-sistema-digital-local/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/desarrollo-de-un-sistema-digital-local/Development of CHRIS/PROBA Modules for the BEAM Toolboxhttps://isp.uv.es/github/projects/past/development-of-chris-proba-modules/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/development-of-chris-proba-modules/Efficient Color Video Compression Using Sequence Analysis Based on Perceptually Significant Image Representationshttps://isp.uv.es/github/projects/past/compresion-eficiente-de-video/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/compresion-eficiente-de-video/ELIAS: European Lighthouse of AI for Sustainabilityhttps://isp.uv.es/github/projects/current/elias/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/elias/ELISE: European Learning And Intelligent Systems Excellencehttps://isp.uv.es/github/projects/current/elise/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/elise/ELLIS - European Laboratory for Learning and Intelligent Systemshttps://isp.uv.es/github/projects/current/ellis/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/ellis/EODIX: Advanced Methodologies in Earth Observation: Optical Data Calibration and Information Extractionhttps://isp.uv.es/github/projects/past/eodix-metodologias-avanzadas/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/eodix-metodologias-avanzadas/ESA Climate Change 
Initiative (CCI) Phase 1: Essential Climate Variable (ECV) Cloudhttps://isp.uv.es/github/projects/past/esa-climate-change-initiative/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/esa-climate-change-initiative/ESA Climate Change Initiative Phase II Soil Moisture (CCI SM 2 Project)https://isp.uv.es/github/projects/past/cci-sm2/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/cci-sm2/Estimación de parámetros biofísicos para la misión FLEX a partir de Sentinel-3https://isp.uv.es/github/projects/past/estimacion-de-parametros-biofisicos/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/estimacion-de-parametros-biofisicos/FLUXCOM: An initiative to upscale biosphere-atmosphere fluxes from FLUXNET sites to continental and global scaleshttps://isp.uv.es/github/projects/past/fluxcom/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/fluxcom/GEOLEARN: Advances in Machine Learning for Large Scale Remote Sensing Data Processinghttps://isp.uv.es/github/projects/past/geolearn/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/geolearn/HERMES: Hybrid Estimation and Remote Sensing Monitoring of Evaporation and Soil Moisturehttps://isp.uv.es/github/projects/current/hermes/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/hermes/HYPERCLASS: Métodos avanzados para la clasificación de imágenes hiperespectraleshttps://isp.uv.es/github/projects/past/hyperclass-metodos-avanzados/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/hyperclass-metodos-avanzados/i-AIDA: International AI Doctoral Academyhttps://isp.uv.es/github/projects/current/i_aida/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/i_aida/iMIRACLI: innovative MachIne leaRning to constrain Aerosol-cloud CLimate Impactshttps://isp.uv.es/github/projects/current/imiracli/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/imiracli/Improvement of 
the current nonlinear regression retrieval (NLR) implemented within the MTGIRS prototype processorhttps://isp.uv.es/github/projects/past/improvement-of-the-current-nonlinear-regression-retrieval/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/improvement-of-the-current-nonlinear-regression-retrieval/Integration of nonlinear perceptual and statistical representations in image restoration and codinghttps://isp.uv.es/github/projects/past/integration-of-nonlinear-perceptual/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/integration-of-nonlinear-perceptual/Interpolación normalizada de imágenes provenientes de múltiples sensores. Interpolación normalizada de imágenes LANDSAT mediante downscalinghttps://isp.uv.es/github/projects/past/interpolacion-normalizada-de-imagenes/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/interpolacion-normalizada-de-imagenes/Knowledge extraction from the Davalor Automated Vision Evaluator (EVA)https://isp.uv.es/github/projects/past/knowledge-extraction-from-davalor-eva/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/knowledge-extraction-from-davalor-eva/LEAVES: LEArning drivers of Vegetation health from Earth observation Synergieshttps://isp.uv.es/github/projects/past/leaves/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/leaves/LIFE-VISION: Learning Image Features to Encode Visual Informationhttps://isp.uv.es/github/projects/past/life-vision/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/life-vision/MALOC: MAchine Learning for assessing Ocean Climatehttps://isp.uv.es/github/projects/current/maloc/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/maloc/MALOPH: A novel MAchine Learning based perspective to identify and model Ocean Precursors to extreme Hurricane developmenthttps://isp.uv.es/github/projects/current/maloph/Mon, 01 Jan 0001 00:00:00 
+0000https://isp.uv.es/github/projects/current/maloph/Mapping and the citizen sensorhttps://isp.uv.es/github/projects/past/mapping-and-the-citizen-sensor/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/mapping-and-the-citizen-sensor/MediTwin: Mediterranean Digital Twin Network for Understanding Climate Extremeshttps://isp.uv.es/github/projects/current/meditwin/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/meditwin/MERIS/AATSR synergy algorithms for cloud screening, aerosol retrieval, and atmospheric correctionhttps://isp.uv.es/github/projects/past/meris-aatsr-synergy-algorithms/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/meris-aatsr-synergy-algorithms/MIPRCV: Multimodal Interaction in Pattern Recognition and Computer Visionhttps://isp.uv.es/github/projects/past/miprcv-multimodal-interaction/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/miprcv-multimodal-interaction/Modelos no gaussianos para la representación de imágenes y secuenciashttps://isp.uv.es/github/projects/past/modelos-no-gaussianos/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/modelos-no-gaussianos/Modelos visuales-estadísticos de representación de imágenes y sus aplicacioneshttps://isp.uv.es/github/projects/past/modelos-visuales-estadisticos-de-representacion-de-imagenes/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/modelos-visuales-estadisticos-de-representacion-de-imagenes/Motion Estimation and Interpretation of Image Sequenceshttps://isp.uv.es/github/projects/past/estimacion-de-movimiento-e-interpretacion/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/estimacion-de-movimiento-e-interpretacion/Natural Image Statistics: Non-parametric Models, Bayesian Models, and Computational Neuroscience for Image Processinghttps://isp.uv.es/github/projects/past/natural-image-statistics/Mon, 01 Jan 0001 00:00:00 
+0000https://isp.uv.es/github/projects/past/natural-image-statistics/New Compressive Sensing Algorithms from Natural and Artificial Brain Networkshttps://isp.uv.es/github/projects/past/compressive-sensing-algorithms/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/compressive-sensing-algorithms/OpenSR — Robust, accountable super-resolution for Sentinel-2 and beyondhttps://isp.uv.es/github/projects/current/opensr/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/opensr/Optimización de los algoritmos de compresión de imágenes biomédicas mediante la utilización de parámetros perceptualeshttps://isp.uv.es/github/projects/past/optimizacion-de-los-algoritmos-de-compresion/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/optimizacion-de-los-algoritmos-de-compresion/Procesado de imágenes de sensores de satélite de media resolución y su integración espectro-temporal con sensores de satélite de baja resoluciónhttps://isp.uv.es/github/projects/past/procesado-de-imagenes-de-sensores/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/procesado-de-imagenes-de-sensores/PV-CDRR: Clouds Detection Algorithms for Proba-Vhttps://isp.uv.es/github/projects/past/pv-cdrr/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/pv-cdrr/Quantifying Visual Beauty: Neuroaesthetics and Machine Learninghttps://isp.uv.es/github/projects/past/quantifying-visual-beauty/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/quantifying-visual-beauty/Red de Excelencia KERMES: Advances in kernel methods for structured datahttps://isp.uv.es/github/projects/past/kermes/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/kermes/Red de Excelencia sobre Neurociencia Visual y Ciencias de la Computaciónhttps://isp.uv.es/github/projects/past/neurociencia-visual-ciencias-computacion/Mon, 01 Jan 0001 00:00:00 
+0000https://isp.uv.es/github/projects/past/neurociencia-visual-ciencias-computacion/RELEARN: RE-using Field Reference Data in Space and Time for Vegetation Mappinghttps://isp.uv.es/github/projects/past/relearn/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/relearn/SCALE: Causal Inference in the Human-Biosphere Coupled Systemhttps://isp.uv.es/github/projects/past/scale/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/scale/SEDAL: Statistical Learning for Earth Observation Data Analysishttps://isp.uv.es/github/projects/past/sedal/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/sedal/SenSyF: Sentinels Synergy Frameworkhttps://isp.uv.es/github/projects/past/sensyf/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/sensyf/Spanish Network for the Advancement and Transfer of Applied Computational Intelligence (ATICA)https://isp.uv.es/github/projects/past/atica/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/atica/Study on Pattern Recognition Based Cloud Detection Over Landmarkshttps://isp.uv.es/github/projects/past/pattern-recognition-cloud-detection/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/pattern-recognition-cloud-detection/Support Vector Machines and Computational Human Vision Models for Image Coding and Denoisinghttps://isp.uv.es/github/projects/past/support-vector-machines-and-computational-human-vision-models/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/support-vector-machines-and-computational-human-vision-models/ThinkingEarth - Copernicus Foundation Models for a Thinking Earthhttps://isp.uv.es/github/projects/current/thinkingearth/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/thinkingearth/USMILE: Understanding and Modeling the Earth System with Machine Learninghttps://isp.uv.es/github/projects/current/usmile/Mon, 01 Jan 0001 00:00:00 
+0000https://isp.uv.es/github/projects/current/usmile/VLC-BioMedic/BioClinic Agreements for the Use of fMRI Scanner Facilitieshttps://isp.uv.es/github/projects/past/vlc-biomedic-bioclinic/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/past/vlc-biomedic-bioclinic/XAIDA: Extreme Events - Artificial Intelligence for Detection and Attributionhttps://isp.uv.es/github/projects/current/xaida/Mon, 01 Jan 0001 00:00:00 +0000https://isp.uv.es/github/projects/current/xaida/ \ No newline at end of file diff --git a/projects/past/atica/index.html b/projects/past/atica/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/atica/index.html +++ b/projects/past/atica/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/biomedicine-vision-models/index.html b/projects/past/biomedicine-vision-models/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/biomedicine-vision-models/index.html +++ b/projects/past/biomedicine-vision-models/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/cci-sm2/index.html b/projects/past/cci-sm2/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/cci-sm2/index.html +++ b/projects/past/cci-sm2/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/cimr-application/index.html b/projects/past/cimr-application/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/cimr-application/index.html +++ b/projects/past/cimr-application/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/cimr-requirements/index.html b/projects/past/cimr-requirements/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/cimr-requirements/index.html +++ b/projects/past/cimr-requirements/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/classification-of-hyperspectral-remote-sensing-images/index.html b/projects/past/classification-of-hyperspectral-remote-sensing-images/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/classification-of-hyperspectral-remote-sensing-images/index.html +++ b/projects/past/classification-of-hyperspectral-remote-sensing-images/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/cloud-detection-in-the-cloud/index.html b/projects/past/cloud-detection-in-the-cloud/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/cloud-detection-in-the-cloud/index.html +++ b/projects/past/cloud-detection-in-the-cloud/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/cloudsat/index.html b/projects/past/cloudsat/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/cloudsat/index.html +++ b/projects/past/cloudsat/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/compresion-eficiente-de-video/index.html b/projects/past/compresion-eficiente-de-video/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/compresion-eficiente-de-video/index.html +++ b/projects/past/compresion-eficiente-de-video/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/compressive-sensing-algorithms/index.html b/projects/past/compressive-sensing-algorithms/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/compressive-sensing-algorithms/index.html +++ b/projects/past/compressive-sensing-algorithms/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/consolidation-of-scientific-baseline/index.html b/projects/past/consolidation-of-scientific-baseline/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/consolidation-of-scientific-baseline/index.html +++ b/projects/past/consolidation-of-scientific-baseline/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/control-objetivo-de-la-calidad-optica/index.html b/projects/past/control-objetivo-de-la-calidad-optica/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/control-objetivo-de-la-calidad-optica/index.html +++ b/projects/past/control-objetivo-de-la-calidad-optica/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/deep-networks-brain/index.html b/projects/past/deep-networks-brain/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/deep-networks-brain/index.html +++ b/projects/past/deep-networks-brain/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/desarrollo-de-un-sistema-digital-local/index.html b/projects/past/desarrollo-de-un-sistema-digital-local/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/desarrollo-de-un-sistema-digital-local/index.html +++ b/projects/past/desarrollo-de-un-sistema-digital-local/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/development-of-chris-proba-modules/index.html b/projects/past/development-of-chris-proba-modules/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/development-of-chris-proba-modules/index.html +++ b/projects/past/development-of-chris-proba-modules/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/diseno-desarrollo-y-validacion-de-un-dispositivo/index.html b/projects/past/diseno-desarrollo-y-validacion-de-un-dispositivo/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/diseno-desarrollo-y-validacion-de-un-dispositivo/index.html +++ b/projects/past/diseno-desarrollo-y-validacion-de-un-dispositivo/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/eodix-metodologias-avanzadas/index.html b/projects/past/eodix-metodologias-avanzadas/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/eodix-metodologias-avanzadas/index.html +++ b/projects/past/eodix-metodologias-avanzadas/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/esa-climate-change-initiative/index.html b/projects/past/esa-climate-change-initiative/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/esa-climate-change-initiative/index.html +++ b/projects/past/esa-climate-change-initiative/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/estimacion-de-movimiento-e-interpretacion/index.html b/projects/past/estimacion-de-movimiento-e-interpretacion/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/estimacion-de-movimiento-e-interpretacion/index.html +++ b/projects/past/estimacion-de-movimiento-e-interpretacion/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/estimacion-de-parametros-biofisicos/index.html b/projects/past/estimacion-de-parametros-biofisicos/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/estimacion-de-parametros-biofisicos/index.html +++ b/projects/past/estimacion-de-parametros-biofisicos/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/fluxcom/index.html b/projects/past/fluxcom/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/fluxcom/index.html +++ b/projects/past/fluxcom/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/geolearn/index.html b/projects/past/geolearn/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/geolearn/index.html +++ b/projects/past/geolearn/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/hyperclass-metodos-avanzados/index.html b/projects/past/hyperclass-metodos-avanzados/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/hyperclass-metodos-avanzados/index.html +++ b/projects/past/hyperclass-metodos-avanzados/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/improvement-of-the-current-nonlinear-regression-retrieval/index.html b/projects/past/improvement-of-the-current-nonlinear-regression-retrieval/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/improvement-of-the-current-nonlinear-regression-retrieval/index.html +++ b/projects/past/improvement-of-the-current-nonlinear-regression-retrieval/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/integration-of-nonlinear-perceptual/index.html b/projects/past/integration-of-nonlinear-perceptual/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/integration-of-nonlinear-perceptual/index.html +++ b/projects/past/integration-of-nonlinear-perceptual/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/interpolacion-normalizada-de-imagenes/index.html b/projects/past/interpolacion-normalizada-de-imagenes/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/interpolacion-normalizada-de-imagenes/index.html +++ b/projects/past/interpolacion-normalizada-de-imagenes/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/kermes/index.html b/projects/past/kermes/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/kermes/index.html +++ b/projects/past/kermes/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/knowledge-extraction-from-davalor-eva/index.html b/projects/past/knowledge-extraction-from-davalor-eva/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/knowledge-extraction-from-davalor-eva/index.html +++ b/projects/past/knowledge-extraction-from-davalor-eva/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/leaves/index.html b/projects/past/leaves/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/leaves/index.html +++ b/projects/past/leaves/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/life-vision/index.html b/projects/past/life-vision/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/life-vision/index.html +++ b/projects/past/life-vision/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/mapping-and-the-citizen-sensor/index.html b/projects/past/mapping-and-the-citizen-sensor/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/mapping-and-the-citizen-sensor/index.html +++ b/projects/past/mapping-and-the-citizen-sensor/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/meris-aatsr-synergy-algorithms/index.html b/projects/past/meris-aatsr-synergy-algorithms/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/meris-aatsr-synergy-algorithms/index.html +++ b/projects/past/meris-aatsr-synergy-algorithms/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/microwave-lidar-synergy/index.html b/projects/past/microwave-lidar-synergy/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/microwave-lidar-synergy/index.html +++ b/projects/past/microwave-lidar-synergy/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/miprcv-multimodal-interaction/index.html b/projects/past/miprcv-multimodal-interaction/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/miprcv-multimodal-interaction/index.html +++ b/projects/past/miprcv-multimodal-interaction/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/modelos-no-gaussianos/index.html b/projects/past/modelos-no-gaussianos/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/modelos-no-gaussianos/index.html +++ b/projects/past/modelos-no-gaussianos/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/modelos-visuales-estadisticos-de-representacion-de-imagenes/index.html b/projects/past/modelos-visuales-estadisticos-de-representacion-de-imagenes/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/modelos-visuales-estadisticos-de-representacion-de-imagenes/index.html +++ b/projects/past/modelos-visuales-estadisticos-de-representacion-de-imagenes/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/natural-image-statistics/index.html b/projects/past/natural-image-statistics/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/natural-image-statistics/index.html +++ b/projects/past/natural-image-statistics/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/neurociencia-visual-ciencias-computacion/index.html b/projects/past/neurociencia-visual-ciencias-computacion/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/neurociencia-visual-ciencias-computacion/index.html +++ b/projects/past/neurociencia-visual-ciencias-computacion/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/optimizacion-de-los-algoritmos-de-compresion/index.html b/projects/past/optimizacion-de-los-algoritmos-de-compresion/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/optimizacion-de-los-algoritmos-de-compresion/index.html +++ b/projects/past/optimizacion-de-los-algoritmos-de-compresion/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/pattern-recognition-cloud-detection/index.html b/projects/past/pattern-recognition-cloud-detection/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/pattern-recognition-cloud-detection/index.html +++ b/projects/past/pattern-recognition-cloud-detection/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/procesado-de-imagenes-de-sensores/index.html b/projects/past/procesado-de-imagenes-de-sensores/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/procesado-de-imagenes-de-sensores/index.html +++ b/projects/past/procesado-de-imagenes-de-sensores/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/pv-cdrr/index.html b/projects/past/pv-cdrr/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/pv-cdrr/index.html +++ b/projects/past/pv-cdrr/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/quantifying-visual-beauty/index.html b/projects/past/quantifying-visual-beauty/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/quantifying-visual-beauty/index.html +++ b/projects/past/quantifying-visual-beauty/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/relearn/index.html b/projects/past/relearn/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/relearn/index.html +++ b/projects/past/relearn/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/scale/index.html b/projects/past/scale/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/scale/index.html +++ b/projects/past/scale/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/sedal/index.html b/projects/past/sedal/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/sedal/index.html +++ b/projects/past/sedal/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/sensyf/index.html b/projects/past/sensyf/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/sensyf/index.html +++ b/projects/past/sensyf/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/support-vector-machines-and-computational-human-vision-models/index.html b/projects/past/support-vector-machines-and-computational-human-vision-models/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/support-vector-machines-and-computational-human-vision-models/index.html +++ b/projects/past/support-vector-machines-and-computational-human-vision-models/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/visual-models-restoration/index.html b/projects/past/visual-models-restoration/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/visual-models-restoration/index.html +++ b/projects/past/visual-models-restoration/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects/past/vlc-biomedic-bioclinic/index.html b/projects/past/vlc-biomedic-bioclinic/index.html index b48c5abd..b87a1443 100644 --- a/projects/past/vlc-biomedic-bioclinic/index.html +++ b/projects/past/vlc-biomedic-bioclinic/index.html @@ -2,4 +2,4 @@
\ No newline at end of file +
\ No newline at end of file diff --git a/projects_links/ai4cs/index.xml b/projects_links/ai4cs/index.xml index 469c6df1..f88b4815 100644 --- a/projects_links/ai4cs/index.xml +++ b/projects_links/ai4cs/index.xml @@ -1 +1 @@ -ai4cs on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/ai4cs/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +ai4cs on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/ai4cs/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/ai4cs/meetings/index.xml b/projects_links/ai4cs/meetings/index.xml index 93f149bd..865ac092 100644 --- a/projects_links/ai4cs/meetings/index.xml +++ b/projects_links/ai4cs/meetings/index.xml @@ -1 +1 @@ -ai4cs on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/ai4cs/meetings/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +ai4cs on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/ai4cs/meetings/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/ai4cs/soft+data/index.xml b/projects_links/ai4cs/soft+data/index.xml index 9d757337..dc5685a4 100644 --- a/projects_links/ai4cs/soft+data/index.xml +++ b/projects_links/ai4cs/soft+data/index.xml @@ -1 +1 @@ -ai4cs on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/ai4cs/soft+data/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +ai4cs on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/ai4cs/soft+data/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/ai4cs/stays/index.xml 
b/projects_links/ai4cs/stays/index.xml index cab0aa0d..019483d9 100644 --- a/projects_links/ai4cs/stays/index.xml +++ b/projects_links/ai4cs/stays/index.xml @@ -1 +1 @@ -ai4cs on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/ai4cs/stays/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +ai4cs on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/ai4cs/stays/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/ai4cs/teams/index.xml b/projects_links/ai4cs/teams/index.xml index 78ebec42..6ebfb959 100644 --- a/projects_links/ai4cs/teams/index.xml +++ b/projects_links/ai4cs/teams/index.xml @@ -1 +1 @@ -ai4cs on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/ai4cs/teams/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +ai4cs on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/ai4cs/teams/Recent content in ai4cs on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/index.html b/projects_links/index.html index 364140e7..5308eae4 100644 --- a/projects_links/index.html +++ b/projects_links/index.html @@ -2,5 +2,5 @@

ai4cs

AI4CS - AI for complex systems: Brain, Earth, Climate, Society Our vision in AI4CS is to develop novel artificial intelligence methods to model and understand com- plex systems, and more specifically the visual brain, Earth and climate systems and the biosphere- anthroposphere interactions. A perfect storm is over us: (i) an ever increasing amount of observational and sensory data, (ii) improved high resolution yet mechanistic models are available, and (iii) advanced ma- chine learning techniques able to extract patterns and identify drivers from data.

Kermes

KERMES - Advances in Kernel methods for Structured Data KERMES is a NoE on kernel methods for structured data, funded by the Spanish Ministry of Economy and Competitiveness, TEC2016-81900-REDT running between 06/17 and 06/19. In the last decade there has been an increasing availability of structured data coming from different sensory devices, with different complexities and noise sources: from time series of geospatial data and remotely sensed images, to biosignals and medical records, or Internet and communication data streams.

opensr

ESA OpenSR - Robust, accountable super-resolution for Sentinel-2 and beyond Towards Explainable AI: Application to Trustworthy Super-Resolution OpenSR is a research project funded by the European Space Agency (ESA) in the framework of the Artificial Intelligence for Earth Observation (AI4EO) initiative of ESA Φ-lab. This activity will focus on AI-empowered Super Resolution techniques for Sentinel-2 and will be demonstrated through a suite of downstream applications. -OpenSR aims to bring robust, accountable, and scalable multi-spectral super-resolution techniques to the Earth Observation (EO) community for the ubiquitous L2 and L3 pre-processing of the Sentinel-2 (S2) products.

Sedal

SEDAL: Statistical Learning for Earth Observation Data Analysis SEDAL is a research project funded by the European Research Council (ERC) Consolidator Grant 2015-2020, and directed by Prof. Gustau Camps-Valls at the Universitat de València, Spain. SEDAL is an interdisciplinary project that aims to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. In the last decade, learning models have helped to monitor land, oceans, and atmosphere through the analysis and estimation of climate and biophysical parameters.
\ No newline at end of file +

ai4cs

AI4CS - AI for complex systems: Brain, Earth, Climate, Society Our vision in AI4CS is to develop novel artificial intelligence methods to model and understand com- plex systems, and more specifically the visual brain, Earth and climate systems and the biosphere- anthroposphere interactions. A perfect storm is over us: (i) an ever increasing amount of observational and sensory data, (ii) improved high resolution yet mechanistic models are available, and (iii) advanced ma- chine learning techniques able to extract patterns and identify drivers from data.

Kermes

KERMES - Advances in Kernel methods for Structured Data KERMES is a NoE on kernel methods for structured data, funded by the Spanish Ministry of Economy and Competitiveness, TEC2016-81900-REDT running between 06/17 and 06/19. In the last decade there has been an increasing availability of structured data coming from different sensory devices, with different complexities and noise sources: from time series of geospatial data and remotely sensed images, to biosignals and medical records, or Internet and communication data streams.

opensr

ESA OpenSR - Robust, accountable super-resolution for Sentinel-2 and beyond Towards Explainable AI: Application to Trustworthy Super-Resolution OpenSR is a research project funded by the European Space Agency (ESA) in the framework of the Artificial Intelligence for Earth Observation (AI4EO) initiative of ESA Φ-lab. This activity will focus on AI-empowered Super Resolution techniques for Sentinel-2 and will be demonstrated through a suite of downstream applications. +OpenSR aims to bring robust, accountable, and scalable multi-spectral super-resolution techniques to the Earth Observation (EO) community for the ubiquitous L2 and L3 pre-processing of the Sentinel-2 (S2) products.

Sedal

SEDAL: Statistical Learning for Earth Observation Data Analysis SEDAL is a research project funded by the European Research Council (ERC) Consolidator Grant 2015-2020, and directed by Prof. Gustau Camps-Valls at the Universitat de València, Spain. SEDAL is an interdisciplinary project that aims to develop novel statistical learning methods to analyze Earth Observation (EO) satellite data. In the last decade, learning models have helped to monitor land, oceans, and atmosphere through the analysis and estimation of climate and biophysical parameters.
\ No newline at end of file diff --git a/projects_links/index.xml b/projects_links/index.xml index b33fd0e8..61d79e84 100644 --- a/projects_links/index.xml +++ b/projects_links/index.xml @@ -1 +1 @@ -Projects_links on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/Recent content in Projects_links on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Projects_links on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/Recent content in Projects_links on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/kermes/index.xml b/projects_links/kermes/index.xml index 6bb652e4..b216ca3c 100644 --- a/projects_links/kermes/index.xml +++ b/projects_links/kermes/index.xml @@ -1 +1 @@ -Kermes on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/kermes/Recent content in Kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Kermes on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/kermes/Recent content in Kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/kermes/meetings/index.xml b/projects_links/kermes/meetings/index.xml index cf43ec8e..b9d857a6 100644 --- a/projects_links/kermes/meetings/index.xml +++ b/projects_links/kermes/meetings/index.xml @@ -1 +1 @@ -kermes on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/kermes/meetings/Recent content in kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +kermes on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/kermes/meetings/Recent content in kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/kermes/soft+data/index.xml b/projects_links/kermes/soft+data/index.xml index 
5b74d9e4..221289b4 100644 --- a/projects_links/kermes/soft+data/index.xml +++ b/projects_links/kermes/soft+data/index.xml @@ -1 +1 @@ -kermes on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/kermes/soft+data/Recent content in kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +kermes on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/kermes/soft+data/Recent content in kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/kermes/stays/index.xml b/projects_links/kermes/stays/index.xml index a4f6f4a2..5055b653 100644 --- a/projects_links/kermes/stays/index.xml +++ b/projects_links/kermes/stays/index.xml @@ -1 +1 @@ -kermes on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/kermes/stays/Recent content in kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +kermes on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/kermes/stays/Recent content in kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/kermes/teams/index.xml b/projects_links/kermes/teams/index.xml index 4f4e32b9..15044d5e 100644 --- a/projects_links/kermes/teams/index.xml +++ b/projects_links/kermes/teams/index.xml @@ -1 +1 @@ -kermes on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/kermes/teams/Recent content in kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +kermes on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/kermes/teams/Recent content in kermes on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/opensr/data/index.xml b/projects_links/opensr/data/index.xml index 979367fe..a2d37407 100644 --- a/projects_links/opensr/data/index.xml 
+++ b/projects_links/opensr/data/index.xml @@ -1 +1 @@ -opensr on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/opensr/data/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +opensr on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/opensr/data/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/opensr/index.xml b/projects_links/opensr/index.xml index 8e7a5056..6443b12c 100644 --- a/projects_links/opensr/index.xml +++ b/projects_links/opensr/index.xml @@ -1 +1 @@ -opensr on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/opensr/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +opensr on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/opensr/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/opensr/methods/index.xml b/projects_links/opensr/methods/index.xml index 1e905321..e463d029 100644 --- a/projects_links/opensr/methods/index.xml +++ b/projects_links/opensr/methods/index.xml @@ -1 +1 @@ -opensr on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/opensr/methods/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +opensr on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/opensr/methods/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/opensr/publication/index.xml b/projects_links/opensr/publication/index.xml index 9aca6577..b66e1235 100644 --- a/projects_links/opensr/publication/index.xml +++ b/projects_links/opensr/publication/index.xml @@ -1 +1 @@ -opensr on ISP - Image and 
Signal Processing grouphttps://ipl-uv.github.io/projects_links/opensr/publication/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +opensr on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/opensr/publication/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/opensr/use_cases/index.xml b/projects_links/opensr/use_cases/index.xml index 18759162..6ea5580f 100644 --- a/projects_links/opensr/use_cases/index.xml +++ b/projects_links/opensr/use_cases/index.xml @@ -1 +1 @@ -opensr on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/opensr/use_cases/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +opensr on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/opensr/use_cases/Recent content in opensr on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/sedal/documents/index.xml b/projects_links/sedal/documents/index.xml index 4c81bb66..75006a43 100644 --- a/projects_links/sedal/documents/index.xml +++ b/projects_links/sedal/documents/index.xml @@ -1 +1 @@ -sedal on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/sedal/documents/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +sedal on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/sedal/documents/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/sedal/index.xml b/projects_links/sedal/index.xml index 5a9c95b2..8d6ed8f8 100644 --- a/projects_links/sedal/index.xml +++ b/projects_links/sedal/index.xml @@ -1 +1 @@ -Sedal on ISP - Image and Signal Processing 
grouphttps://ipl-uv.github.io/projects_links/sedal/Recent content in Sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +Sedal on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/sedal/Recent content in Sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/sedal/people/index.xml b/projects_links/sedal/people/index.xml index 94bc4796..d1d3b475 100644 --- a/projects_links/sedal/people/index.xml +++ b/projects_links/sedal/people/index.xml @@ -1 +1 @@ -sedal on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/sedal/people/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +sedal on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/sedal/people/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/sedal/publications/books/index.xml b/projects_links/sedal/publications/books/index.xml index f7b374f1..cfe17b55 100644 --- a/projects_links/sedal/publications/books/index.xml +++ b/projects_links/sedal/publications/books/index.xml @@ -1 +1 @@ -sedal on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/sedal/publications/books/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +sedal on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/sedal/publications/books/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/sedal/publications/conferences/index.xml b/projects_links/sedal/publications/conferences/index.xml index 9af2d9ba..7293d61f 100644 --- a/projects_links/sedal/publications/conferences/index.xml +++ b/projects_links/sedal/publications/conferences/index.xml @@ -1 +1 @@ -sedal 
on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/sedal/publications/conferences/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +sedal on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/sedal/publications/conferences/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/sedal/publications/journals/index.xml b/projects_links/sedal/publications/journals/index.xml index 5ebdc88d..345c322a 100644 --- a/projects_links/sedal/publications/journals/index.xml +++ b/projects_links/sedal/publications/journals/index.xml @@ -1 +1 @@ -sedal on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/sedal/publications/journals/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +sedal on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/sedal/publications/journals/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/projects_links/sedal/publications/talks/index.xml b/projects_links/sedal/publications/talks/index.xml index 74789719..b5797561 100644 --- a/projects_links/sedal/publications/talks/index.xml +++ b/projects_links/sedal/publications/talks/index.xml @@ -1 +1 @@ -sedal on ISP - Image and Signal Processing grouphttps://ipl-uv.github.io/projects_links/sedal/publications/talks/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file +sedal on ISP - Image and Signal Processing grouphttps://isp.uv.es/github/projects_links/sedal/publications/talks/Recent content in sedal on ISP - Image and Signal Processing groupHugoen-us \ No newline at end of file diff --git a/publications/books/index.html b/publications/books/index.html index a695a58c..0cfff3a7 100644 --- 
a/publications/books/index.html +++ b/publications/books/index.html @@ -2,7 +2,7 @@

Books and book chapters