Hi!
I have a preprocessing pipeline script that I use for MEG data. I recently created a new virtual environment to work on new data:
- Python 3.12.4
- MNE 1.8.0
- Scipy 1.13.1
- Numpy 1.26.4
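For reference, a quick snippet (separate from the pipeline itself) that I run to confirm the versions above; mne.sys_info() prints a fuller report:

import sys
import mne, numpy, scipy

print(sys.version)          # 3.12.4
print(mne.__version__)      # 1.8.0
print(scipy.__version__)    # 1.13.1
print(numpy.__version__)    # 1.26.4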
While trying to fit an ICA to my data, I got an error when executing the last line of the following:
ica = mne.preprocessing.ICA(n_components=20, random_state=97, method="picard", fit_params=dict(ortho=True, extended=True))
ica.fit(raw_for_ICA, picks="meg")
The error is:
File <decorator-gen-470>:10, in fit(self, inst, picks, start, stop, decim, reject, flat, tstep, reject_by_annotation, verbose)
File ~/.conda/envs/bodylingual/lib/python3.12/site-packages/mne/preprocessing/ica.py:736, in ICA.fit(self, inst, picks, start, stop, decim, reject, flat, tstep, reject_by_annotation, verbose)
733 self.ch_names = self.info["ch_names"]
735 if isinstance(inst, BaseRaw):
--> 736 self._fit_raw(
737 inst,
738 picks,
739 start,
740 stop,
741 decim,
742 reject,
743 flat,
744 tstep,
745 reject_by_annotation,
746 verbose,
747 )
748 else:
749 assert isinstance(inst, BaseEpochs)
File ~/.conda/envs/bodylingual/lib/python3.12/site-packages/mne/preprocessing/ica.py:813, in ICA._fit_raw(self, raw, picks, start, stop, decim, reject, flat, tstep, reject_by_annotation, verbose)
810 self.reject_ = None
812 self.n_samples_ = data.shape[1]
--> 813 self._fit(data, "raw")
815 return self
File ~/.conda/envs/bodylingual/lib/python3.12/site-packages/mne/preprocessing/ica.py:898, in ICA._fit(self, data, fit_type)
895 data = self._pre_whiten(data)
897 pca = _PCA(n_components=self._max_pca_components, whiten=True)
--> 898 data = pca.fit_transform(data.T)
899 use_ev = pca.explained_variance_ratio_
900 n_pca = self.n_pca_components
File ~/.conda/envs/bodylingual/lib/python3.12/site-packages/mne/utils/numerics.py:849, in _PCA.fit_transform(self, X, y)
847 def fit_transform(self, X, y=None):
848 X = X.copy()
--> 849 U, S, _ = self._fit(X)
850 U = U[:, : self.n_components_]
852 if self.whiten:
853 # X_new = X * V / S * sqrt(n_samples) = U * sqrt(n_samples)
File ~/.conda/envs/bodylingual/lib/python3.12/site-packages/mne/utils/numerics.py:890, in _PCA._fit(self, X)
887 self.mean_ = np.mean(X, axis=0)
888 X -= self.mean_
--> 890 U, S, V = _safe_svd(X, full_matrices=False)
891 # flip eigenvectors' sign to enforce deterministic output
892 U, V = svd_flip(U, V)
File ~/.conda/envs/bodylingual/lib/python3.12/site-packages/mne/fixes.py:94, in _safe_svd(A, **kwargs)
92 raise ValueError("Cannot set overwrite_a=True with this function")
93 try:
---> 94 return linalg.svd(A, **kwargs)
95 except np.linalg.LinAlgError as exp:
96 from .utils import warn
File ~/.conda/envs/bodylingual/lib/python3.12/site-packages/scipy/linalg/_decomp_svd.py:130, in svd(a, full_matrices, compute_uv, overwrite_a, check_finite, lapack_driver)
128 sz = max(m * min_mn, n * min_mn)
129 if max(m * min_mn, n * min_mn) > numpy.iinfo(numpy.int32).max:
--> 130 raise ValueError(f"Indexing a matrix of {sz} elements would "
131 "incur an in integer overflow in LAPACK.")
133 funcs = (lapack_driver, lapack_driver + '_lwork')
134 gesXd, gesXd_lwork = get_lapack_funcs(funcs, (a1,), ilp64='preferred')
ValueError: Indexing a matrix of 2212380000 elements would incur an in integer overflow in LAPACK.
From my understanding, the data matrix here has 2,212,380,000 elements, which exceeds the maximum value of a 32-bit signed integer (2,147,483,647). Is there any way to use 64-bit integers to prevent this from happening?
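For what it's worth, here is the arithmetic as I understand it, plus a workaround I am considering; the 306-channel figure is an assumption about my recording, and I have not actually tried the decim call yet:

import numpy as np

# 2,212,380,000 elements is above the signed 32-bit limit used for LAPACK indexing
print(np.iinfo(np.int32).max)   # 2147483647
print(306 * 7_230_000)          # 2212380000 -- assuming 306 MEG channels x ~7.23 M samples

# Possible workaround: decimate while fitting so n_channels * n_samples stays below the limit
# (decim is a parameter of ICA.fit, visible in the traceback signature above)
ica.fit(raw_for_ICA, picks="meg", decim=3)

I would rather not throw away samples if a 64-bit route exists, though.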
Best,