From 23b066ac4e5b0cbadda54df7eb682a0cdea06c16 Mon Sep 17 00:00:00 2001 From: Carlos Gomez Date: Fri, 23 Sep 2016 00:27:00 +0200 Subject: [PATCH] VIP version 0.5.2 changes: - opencv dependency is now optional (if cv2 is not installed VIP will use ndimage or skimage) - documentation/webpage updated --- VERSION | 2 +- docs/source/faq.rst | 125 +++++++++++++++------------ docs/source/index.rst | 51 ++++------- docs/source/install.rst | 56 +++++++----- readme.rst | 12 +-- requirements | 17 ++++ setup.py | 61 ++++++------- vip/calib/badpixremoval.py | 173 ++++++++++++++++++++++++------------- vip/calib/derotation.py | 88 +++++++++++++++---- vip/calib/recentering.py | 29 +++++-- vip/calib/rescaling.py | 158 +++++++++++++++++++++------------ vip/pca/utils.py | 1 - 12 files changed, 475 insertions(+), 298 deletions(-) create mode 100644 requirements diff --git a/VERSION b/VERSION index 5d4294b9..2411653a 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.5.1 \ No newline at end of file +0.5.2 \ No newline at end of file diff --git a/docs/source/faq.rst b/docs/source/faq.rst index ffaf4276..3e1c505c 100644 --- a/docs/source/faq.rst +++ b/docs/source/faq.rst @@ -1,68 +1,87 @@ FAQ ---- -- The VIP setup doesn't finish the job, it seems to be stuck. What do I do? - We have experienced a few times that the setup script hangs while installing - photutils. The reason why it crashes when compiling its own modules is unknown. - We recommend to kill the process (Ctrl + C) and restart it again by re-running - the setup command. A workaround is to install photutils before executing VIP - setup: +First things first. Please make sure you have the latest version of ``VIP``. +Please go and check the repository now! + + +On linux, why do I get a matplotlib related error when importing ``VIP``? +*ImportError: Matplotlib qt-based backends require an external PyQt4, PyQt5, +or PySide package to be installed, but it was not found.* +You may need to change the matplotlib backend. Find your matplotlibrc +configuration file and change the backend from WXAgg to TkAgg. More info here: +http://matplotlib.org/faq/usage_faq.html#what-is-a-backend. On linux the +matplotlibrc file is located in: + +.. code-block:: bash + + $HOME/.config/matplotlib/matplotlibrc + + +Why do I get, in OSX, the following RuntimeError? +*Python is not installed as a framework. The Mac OS X backend will not be able +to function correctly if Python is not installed as a framework. See the +Python documentation for more information on installing Python as a framework +on Mac OS X. Please either reinstall Python as a framework, or try one of the +other backends.* +Again, this is a matplotlib-backend related issue. Read the link in the previous +answer. It can be solved setting the backend to WXAgg or TkAgg. + + +The ``VIP`` setup.py script doesn't finish the job, it seems to be stuck. +What do I do? +This is very unlikely to happen with the latest versions of pip, setuptools +and ``VIP``'s setup script. Anyway, if this happens kill the process +(Ctrl + C) and start it again by re-running the setup command. A workaround +is to install the problematic dependency before executing ``VIP`` setup: .. code-block:: bash - $ conda install --channel https://conda.anaconda.org/astropy photutils + $ pip install + -- Why the setup fails complaining about the lack of a Fortran compiler? - Fortran compilers are apparently needed for compiling Scipy from source. Make - sure there is a Fortran compiler in your system. 
A workaround is to install - Scipy through conda before running the setup script: +Why the setup fails complaining about the lack of a Fortran compiler? +Fortran compilers are apparently needed for compiling ``Scipy`` from source. Make +sure there is a Fortran compiler in your system. A workaround is to install +``Scipy`` through conda before running the setup script: .. code-block:: bash $ conda install scipy -- Why do I get and error related to importing cv2 package when importing VIP? - cv2 is the name of opencv bindings for python. This library is needed for - fast image transformations. You have to install by following the - aforementioned instructions. - -- Why do I get a warning related to DS9/XPA when importing VIP? - Please make sure you have DS9 and XPA in your system path. Try installing it - with your system package management tool. - -- Why Python crashes when using some of the parallel functions, e.g. - *pca_adi_annular_quad* and *run_mcmc_astrometry*? - These functions require running SVD on several processes and this can be - problematic depending on the linear algebra libraries on your machine. We've - encountered this problem on OSX systems that use the ACCELERATE library for - linear algebra calculations (default in every OSX system). For this library - the multiprocessing is broken. A workaround is to compile Python against other - linear algebra library (e.g. OPENBLAS). An quick-n-easy fix is to install the - latest ANACONDA (2.5 or later) distribution which ships MKL library and - effectively replaces ACCELERATE on OSX systems. On linux with the default - LAPACK/BLAS libraries VIP successfully distributes the SVDs among all - the existing cores. - -- Why do I get, in linux, a matplotlib related error when importing VIP? - (Matplotlib backend_wx and backend_wxagg require wxPython >=2.8) - If you use Canopy python distro then this is caused by the combination - linux/Canopy. Nothing to do with the VIP pipeline. You may need to change the - matplotlib backend. Find your matplotlibrc configuration file and change the - backend from WXAgg to Qt4Agg. More info here: - http://matplotlib.org/faq/usage_faq.html#what-is-a-backend - -- Why do I get, in OSX, the RuntimeError shown below? - (Python is not installed as a framework. The Mac OS X backend will not be able - to function correctly if Python is not installed as a framework. See the - Python documentation for more information on installing Python as a framework - on Mac OS X. Please either reinstall Python as a framework, or try one of the - other backends.) - Again, this is a matplotlib-backend issue (not VIP related). Read the link in - the previous question. It can be solved setting the backend to WXAgg or TkAgg. - -- I get an error: ValueError: "unknown locale: UTF-8" when importing VIP. - It's not a VIP related problem. The problem must be solved if you add these - lines in your ~/.bash_profile: + +Why do I get and error related to importing ``cv2`` package when importing ``VIP``? +``cv2`` is the name of ``Opencv`` bindings for python. This library is needed for +fast image transformations, it is the default library used although it is optional. +You can install it with conda: + +.. code-block:: bash + + $ conda install opencv + + +Why do I get a warning related to ``DS9/XPA`` when importing ``VIP``? +Please make sure you have ``DS9`` and ``XPA`` in your system path. Try installing +them using your system's package management tool. + + +Why Python crashes when using some of the parallel functions, e.g. 
+``pca_adi_annular`` and ``run_mcmc_astrometry``?
+These functions require running SVD on several processes and this can be
+problematic depending on the linear algebra libraries on your machine. We've
+encountered this problem on OSX systems that use the ACCELERATE library for
+linear algebra calculations (default in every OSX system). For this library
+the multiprocessing is broken. A workaround is to compile Python against another
+linear algebra library (e.g. OPENBLAS). A quick-and-easy fix is to install the
+latest ANACONDA (2.5 or later) distribution, which ships the MKL library and
+effectively replaces ACCELERATE on OSX systems. On linux with the default
+LAPACK/BLAS libraries ``VIP`` successfully distributes the SVDs among all
+the existing cores.
+
+
+I get an error: ValueError: "unknown locale: UTF-8" when importing ``VIP``.
+It's not ``VIP``'s fault. The problem should be solved if you add these lines in
+your ~/.bash_profile:
 
 .. code-block:: bash
 
diff --git a/docs/source/faq.rst b/docs/source/faq.rst (continued)
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 38f81e99..2276fbd5 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -38,8 +38,8 @@ the github repository.
 
The goal of ``VIP`` is to incorporate robust, efficient, easy-to-use, well-documented
and open source implementations of high-contrast image processing algorithms to
-the interested scientific community. The main repository of ``VIP`` resides on Github,
-the standard for scientific open source code distribution, using Git as a
+the interested scientific community. The main repository of ``VIP`` resides on
+Github, the standard for scientific open source code distribution, using Git as a
version control system. Github is a repository hosting service with an amazing
web front-end for source-code management and collaboration. It provides features
such as access control, bug tracking, feature requests, task management, and
@@ -48,11 +48,12 @@ wikis for every project.
``VIP`` is being developed in Python 2.7, a modern open source high-level programming
language, able to express a large amount of functionality per line of code. Python
has a vast ecosystem of scientific open source libraries/packages (e.g.
-numpy, scipy, astropy, scikit-learn, scikit-image) and many well-known libraries
-have python bindings as well (e.g. opencv). On top of that exist a great tool,
-the Jupyter (né IPython) notebook. A notebook file is simple a JSON document,
-containing text, source code, rich media output, and metadata. It allows to
-combine data analysis and visualization into an easily sharable format.
+``numpy``, ``scipy``, ``astropy``, ``scikit-learn``, ``scikit-image``) and many
+well-known libraries have python bindings as well (e.g. ``opencv``). On top of
+that exists a great tool, the ``Jupyter`` (né ``IPython``) notebook. A notebook
+file is simply a JSON document, containing text, source code, rich media output,
+and metadata. It allows combining data analysis and visualization into an
+easily sharable format.
 
 
Jupyter notebook tutorial
@@ -65,38 +66,18 @@ Jupyter notebook tutorial
Quick Setup Guide
------------------
 
-Get the code from github by downloading it as a zip file or by cloning the
-repository (make sure your system has git installed):
+Run:
 
 .. code-block:: bash
 
-    $ git clone https://github.com/vortex-exoplanet/VIP.git
-
-Install opencv. If you use Anaconda run:
-
-.. code-block:: bash
-
-    $ conda install opencv
-
-From the root of the ``VIP`` package:
-
-..
code-block:: bash - - $ python setup.py develop - -Start Python (IPython or Jupyter notebook) and check if the setup worked fine by -importing ``VIP``: - -.. code-block:: python - - import vip - + $ pip install git+https://github.com/vortex-exoplanet/VIP.git Installation and dependencies ------------------------------ -You can find a more detailed description of ``VIP``'s dependencies here. +Here you can find a more detailed description of ``VIP``'s dependencies, how to +clone the repository and install ``Opencv`` (optional). .. toctree:: :maxdepth: 2 @@ -106,8 +87,8 @@ You can find a more detailed description of ``VIP``'s dependencies here. Frequently asked questions --------------------------- -Check out this questions if you find problems when installing or running ``VIP`` for -the first time. +Check out this questions if you find problems when installing or running ``VIP`` +for the first time. .. toctree:: :maxdepth: 2 @@ -126,14 +107,14 @@ image rotation, pixel temporal and spatial subsampling. On top of that several pre-processing functions are available such as re-centering and bad frame detection. -For Angular differential imaging (ADI) data several point spread gunction +For Angular differential imaging (ADI) data several point spread function subtraction techniques are available, from a simple median subtraction to different low-rank approximations. Principal Component Analysis (PCA) methods are implemented in different flavours. Also PCA can process RDI and multiple channel SDI (IFS) data. ``VIP`` contains a first version of the Local Low-rank plus Sparse plus Gaussian-noise decomposition (LLSG, Gomez Gonzalez et al. 2016). -Functions for Signal-to-noise ratio (S/R) estimation, S/R map generation are +Functions for Signal-to-noise ratio (S/N) estimation and S/N map generation are included. Flux and position of point-like sources are estimated using the Negative Fake diff --git a/docs/source/install.rst b/docs/source/install.rst index d5a56fab..3eae8207 100644 --- a/docs/source/install.rst +++ b/docs/source/install.rst @@ -8,39 +8,49 @@ Homebrew/MacPorts/Fink for OSX. I personally recommend using Anaconda which you can find here: https://www.continuum.io/downloads. A setup.py file (Setuptools Python package) is included in the root folder of -VIP. It takes care of installing most of the requirements for you. VIP depends on -existing packages from the Python ecosystem, such as numpy, scipy, matplotlib, -pandas, astropy, scikit-learn, scikit-image, photutils, emcee, etc. - -VIP ships a stripped-down version of RO.DS9 (Russell Owen) for convenient -xpaset/xpaget based interaction with DS9. VIP contains a class vipDS9 that works -on top of RO.DS9 containing several useful methods for DS9 control such as -displaying arrays, manipulating regions, controlling the display options, etc. -VipDS9 functionality will only be available if you have DS9 and XPA installed -on your system PATH. - -Opencv (Open source Computer Vision) provides fast c++ image processing -operations and is used by VIP (whenever it's possible) instead of python based -libraries such as ndimage or scikit-image. This dependency is tricky to get -install/compile, but having Anaconda python distribution the process takes just -a command provided that you have a C compiler installed in your system, like g++. -We'll use conda (core component of Anaconda) in this shell/terminal command: +VIP. It takes care of installing most of the requirements for you. 
``VIP`` depends
+on existing packages from the Python ecosystem, such as ``numpy``, ``scipy``,
+``matplotlib``, ``pandas``, ``astropy``, ``scikit-learn``, ``scikit-image``,
+``photutils``, ``emcee``, etc.
+
+``VIP`` ships a stripped-down version of ``RO.DS9`` (by Russell Owen) for convenient
+``xpaset/xpaget`` based interaction with ``DS9``. ``VIP`` contains a class
+``vipDS9`` that works on top of ``RO.DS9`` and contains several useful methods for
+``DS9`` control such as displaying arrays, manipulating regions, controlling the
+display options, etc. ``VipDS9`` functionality will only be available if you have
+``DS9`` and ``XPA`` installed on your system PATH.
+
+First, get the code from github by downloading it as a zip file or by cloning the
+repository (make sure your system has git installed):
+
+.. code-block:: bash
+
+    $ git clone https://github.com/vortex-exoplanet/VIP.git
+
+``Opencv`` (Open source Computer Vision) provides fast C++ image processing
+operations and is used by ``VIP`` for basic image transformations. Starting from
+version 0.5.2 of ``VIP`` the dependency on ``opencv`` is optional. If you don't
+have/want the ``opencv`` python bindings, ``VIP`` will use the much slower
+``ndimage/scikit-image`` libraries transparently. Installing the ``opencv``
+library can be tricky, but with the Anaconda python distribution (the ``conda``
+package manager) it takes just one command (this step is optional):
 
 .. code-block:: bash
 
     $ conda install opencv
 
-For installing VIP in your system run setup.py:
+To install ``VIP`` on your system, run setup.py:
 
 .. code-block:: bash
 
   $ python setup.py install
 
-The code is in continuous development and will be changing often. It's preferred
-to 'install' with the develop flag:
+Wait a minute until all the requirements are satisfied. Finally, start Python
+(or IPython/Jupyter notebook if you prefer) and check that you are able to
+import ``VIP``:
 
-.. code-block:: bash
+.. code-block:: python
 
-  $ python setup.py develop
+  import vip
 
-In any case wait a couple of minutes until all the requirements are satisfied.
+Hopefully you will see the welcome message of ``VIP``.
\ No newline at end of file
diff --git a/readme.rst b/readme.rst
index 161cced9..3c79f0d1 100644
--- a/readme.rst
+++ b/readme.rst
@@ -35,15 +35,11 @@ Documentation can be found in http://vip.readthedocs.io/.
 
Quick Setup Guide
------------------
 
-Install opencv. If you use Anaconda run:
+Run:
 
 .. code-block:: bash
-
-  $ conda install opencv
 
-From the root of the VIP package:
-
-.. code-block:: bash
-
-  $ python setup.py develop
+  $ pip install git+https://github.com/vortex-exoplanet/VIP.git
 
+Read the documentation for a detailed installation procedure (and optionally,
+how to install ``Opencv``).
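Whether the optional ``opencv`` bindings are present can be checked directly from
Python before installing; a minimal sketch (illustrative, not a ``VIP`` API call),
relying only on the standard ``cv2`` import and its ``__version__`` attribute:

.. code-block:: python

    # Probe for the optional opencv bindings; when they are missing,
    # VIP 0.5.2 falls back to the slower ndimage/scikit-image routines.
    try:
        import cv2
        print("opencv available, version %s" % cv2.__version__)
    except ImportError:
        print("opencv not found: VIP will use ndimage/scikit-image instead")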
\ No newline at end of file diff --git a/requirements b/requirements new file mode 100644 index 00000000..c51e4e1d --- /dev/null +++ b/requirements @@ -0,0 +1,17 @@ +cython +numpy >= 1.8 +scipy >= 0.17 +astropy >= 1.0.2 +photutils >= 0.2.2 +scikit-learn >= 0.17 +scikit-image >= 0.11 +emcee >= 2.1 +corner >= 1.0.2 +pandas >= 0.18 +matplotlib >= 1.4.3 +psutil +pytest +image_registration +FITS_tools +pywavelets +pyprind diff --git a/setup.py b/setup.py index 3e40f4b8..6986600f 100644 --- a/setup.py +++ b/setup.py @@ -1,53 +1,42 @@ #!/usr/bin/env python import os -try: - from setuptools import setup, find_packages -except ImportError: - from distutils.core import setup - +from setuptools import setup, find_packages +from pip.req import parse_requirements +from setuptools.command.install import install + +class InstallReqs(install): + def run(self): + print(" ********************** ") + print(" *** Installing VIP *** ") + print(" ********************** ") + os.system('pip install -r requirements') + install.run(self) -readme = open('README.rst').read() -doclink = """ -Documentation -------------- -TO-DO. Meanwhile please rely on the docstrings and jupyter tutorial. -""" - PACKAGE_PATH = os.path.abspath(os.path.join(__file__, os.pardir)) +# parse_requirements() returns generator of pip.req.InstallRequirement objects +install_reqs = parse_requirements(os.path.join(PACKAGE_PATH, 'requirements'), + session=False) +requirements = [str(ir.req) for ir in install_reqs] + with open(os.path.join(PACKAGE_PATH, 'VERSION')) as version_file: __version__ = version_file.read().strip() + +with open(os.path.join(PACKAGE_PATH, 'readme.rst')) as readme_file: + readme = readme_file.read() setup( name='vip', version=__version__, - description='Package for astronomical high-contrast image processing and exoplanet detection.', - long_description=readme + '\n\n' + doclink + '\n\n', - author='Carlos Gomez', + description='Package for astronomical high-contrast image processing.', + long_description=readme, + author='Carlos Gomez Gonzalez', author_email='cgomez@ulg.ac.be', - url='https://github.com/vortex-exoplanet/VIP', + url='https://github.com/vortex-exoplanet/VIP', + cmdclass={'install': InstallReqs}, packages=find_packages(), - include_package_data=True, - install_requires=['cython', - 'numpy >= 1.8', - 'scipy >= 0.17', - 'ipython >= 3.2', - 'jupyter', - 'astropy >= 1.0.2', - 'emcee >= 2.1', - 'corner >= 1.0.2', - 'pandas >= 0.18', - 'matplotlib >= 1.4.3', - 'scikit-learn >= 0.17', - 'scikit-image >= 0.11', - 'psutil', - 'pytest', - 'photutils >= 0.2.2', - 'image_registration', - 'FITS_tools', - 'pywavelets', - 'pyprind'], + install_requires=requirements, zip_safe=False, classifiers=['Development Status :: 4 - Beta', 'Intended Audience :: Science/Research', diff --git a/vip/calib/badpixremoval.py b/vip/calib/badpixremoval.py index 79a4c4d6..6ca6155a 100644 --- a/vip/calib/badpixremoval.py +++ b/vip/calib/badpixremoval.py @@ -7,11 +7,11 @@ from __future__ import division __author__ = 'C. Gomez @ ULg', 'V. 
Christiaens' -__all__ = ['cube_fix_badpix_isolated', +__all__ = ['frame_fix_badpix_isolated', + 'cube_fix_badpix_isolated', 'cube_fix_badpix_annuli', 'cube_fix_badpix_clump'] -import cv2 import numpy as np import pyprind from skimage.draw import circle, ellipse @@ -21,8 +21,89 @@ from ..var import dist, frame_center, pp_subplots from ..stats import clip_array from ..conf import timing, timeInit - - + + +def frame_fix_badpix_isolated(array, bpm_mask=None, sigma_clip=3, num_neig=5, + size=5, protect_mask=False, radius=30, verbose=True, + debug=False): + """ Corrects the bad pixels, marked in the bad pixel mask. The bad pixel is + replaced by the median of the adjacent pixels. This function is very fast + but works only with isolated (sparse) pixels. + + Parameters + ---------- + array : array_like + Input 2d array. + bpm_mask : array_like, optional + Input bad pixel map. Zeros frame where the bad pixels have a value of 1. + If None is provided a bad pixel map will be created per frame using + sigma clip statistics. In the case of a cube the bad pixels will be + computed on the mean frame of the stack. + sigma_clip : int, optional + In case no bad pixel mask is provided all the pixels above and below + sigma_clip*STDDEV will be marked as bad. + num_neig : int, optional + The side of the square window around each pixel where the sigma clipped + statistics are calculated (STDDEV and MEDIAN). If the value is equal to + 0 then the statistics are computed in the whole frame. + size : odd int, optional + The size the box (size x size) of adjacent pixels for the median filter. + protect_mask : {False, True}, bool optional + If True a circular aperture at the center of the frames will be + protected from any operation. With this we protect the star and vicinity. + radius : int, optional + Radius of the circular aperture (at the center of the frames) for the + protection mask. + verbose : {True, False}, bool optional + If True additional information will be printed. + debug : {False, True}, bool optional + If debug is True, the bpm_mask and the input array are plotted. If the + input array is a cube, a long output is to be expected. Better check the + results with single images. + + Return + ------ + frame : array_like + Frame with bad pixels corrected. 
+ """ + if not array.ndim == 2: + raise TypeError('Array is not a 2d array or single frame') + if size % 2 == 0: + raise TypeError('Size of the median blur kernel must be an odd integer') + + if bpm_mask is not None: + bpm_mask = bpm_mask.astype('bool') + + if verbose: start = timeInit() + + if num_neig > 0: + neigh = True + else: + neigh = False + + frame = array.copy() + cy, cx = frame_center(frame) + if bpm_mask is None: + ind = clip_array(frame, sigma_clip, sigma_clip, neighbor=neigh, + num_neighbor=num_neig, mad=True) + bpm_mask = np.zeros_like(frame) + bpm_mask[ind] = 1 + if protect_mask: + cir = circle(cy, cx, radius) + bpm_mask[cir] = 0 + bpm_mask = bpm_mask.astype('bool') + if debug: pp_subplots(frame, bpm_mask, title='Frame / Bad pixel mask') + + smoothed = median_filter(frame, size, mode='nearest') + frame[np.where(bpm_mask)] = smoothed[np.where(bpm_mask)] + array_out = frame + + if verbose: + print "\nDone replacing bad pixels using the median of the neighbors" + timing(start) + return array_out + + def cube_fix_badpix_isolated(array, bpm_mask=None, sigma_clip=3, num_neig=5, size=5, protect_mask=False, radius=30, verbose=True, debug=False): @@ -33,7 +114,7 @@ def cube_fix_badpix_isolated(array, bpm_mask=None, sigma_clip=3, num_neig=5, Parameters ---------- array : array_like - Input 2d or 3d array. + Input 3d array. bpm_mask : array_like, optional Input bad pixel map. Zeros frame where the bad pixels have a value of 1. If None is provided a bad pixel map will be created per frame using @@ -63,11 +144,11 @@ def cube_fix_badpix_isolated(array, bpm_mask=None, sigma_clip=3, num_neig=5, Return ------ - frame : array_like - Frame with bad pixels corrected. + array_out : array_like + Cube with bad pixels corrected. """ - if array.ndim>3 or array.ndim<2: - raise TypeError('Array is not a 2d or 3d array') + if not array.ndim == 3: + raise TypeError('Array is not a 3d array or cube') if size%2==0: raise TypeError('Size of the median blur kernel must be an odd integer') @@ -78,59 +159,33 @@ def cube_fix_badpix_isolated(array, bpm_mask=None, sigma_clip=3, num_neig=5, if num_neig>0: neigh=True else: neigh=False - - if array.ndim==2: - frame = array.copy() - cy, cx = frame_center(frame) - if bpm_mask is None: - ind = clip_array(frame, sigma_clip, sigma_clip, neighbor=neigh, - num_neighbor=num_neig, mad=True) - bpm_mask = np.zeros_like(frame) - bpm_mask[ind] = 1 - if protect_mask: - cir = circle(cy, cx, radius) - bpm_mask[cir] = 0 - bpm_mask = bpm_mask.astype('bool') - if debug: pp_subplots(frame, bpm_mask) - - if size==3 or size==5: - smoothed = cv2.medianBlur(frame.astype(np.float32), size) - elif size>5: - smoothed = median_filter(frame, size, mode='nearest') - frame[np.where(bpm_mask)] = smoothed[np.where(bpm_mask)] - array_out = frame - elif array.ndim==3: - cy, cx = frame_center(array[0]) - cube_out = array.copy() - n_frames = array.shape[0] - - if bpm_mask is None: - ind = clip_array(np.mean(array, axis=0), sigma_clip, sigma_clip, - neighbor=neigh, num_neighbor=num_neig, mad=True) - bpm_mask = np.zeros_like(array[0]) - bpm_mask[ind] = 1 - if protect_mask: - cir = circle(cy, cx, radius) - bpm_mask[cir] = 0 - bpm_mask = bpm_mask.astype('bool') - - if debug: pp_subplots(bpm_mask) - - bar = pyprind.ProgBar(n_frames, stream=1, title='Looping through frames') - for i in range(n_frames): - frame = cube_out[i] - if size==3 or size==5: - smoothed = cv2.medianBlur(frame.astype(np.float32), size) - elif size>5: - smoothed = median_filter(frame, size, mode='nearest') - frame[np.where(bpm_mask)] = 
smoothed[np.where(bpm_mask)]
-            bar.update()
-        array_out = cube_out
+    cy, cx = frame_center(array[0])
+    cube_out = array.copy()
+    n_frames = array.shape[0]
+
+    if bpm_mask is None:
+        ind = clip_array(np.mean(array, axis=0), sigma_clip, sigma_clip,
+                         neighbor=neigh, num_neighbor=num_neig, mad=True)
+        bpm_mask = np.zeros_like(array[0])
+        bpm_mask[ind] = 1
+        if protect_mask:
+            cir = circle(cy, cx, radius)
+            bpm_mask[cir] = 0
+        bpm_mask = bpm_mask.astype('bool')
+
+    if debug: pp_subplots(bpm_mask, title='Bad pixel mask')
+
+    bar = pyprind.ProgBar(n_frames, stream=1, title='Looping through frames')
+    for i in range(n_frames):
+        frame = cube_out[i]
+        smoothed = median_filter(frame, size, mode='nearest')
+        frame[np.where(bpm_mask)] = smoothed[np.where(bpm_mask)]
+        bar.update()
+    array_out = cube_out
 
     if verbose:
-        print
-        print "Done replacing bad pixels using the median of the neighbors"
+        print "\nDone replacing bad pixels using the median of the neighbors"
         timing(start)
     return array_out
 
diff --git a/vip/calib/derotation.py b/vip/calib/derotation.py
index fbf9ca5b..da40b98c 100644
--- a/vip/calib/derotation.py
+++ b/vip/calib/derotation.py
@@ -8,11 +8,21 @@
     'frame_rotate']
 
 import numpy as np
-import cv2
+import warnings
+try:
+    import cv2
+    no_opencv = False
+except ImportError:
+    msg = "Opencv python bindings are missing (consult VIP documentation for "
+    msg += "Opencv installation instructions). Scikit-image will be used instead."
+    warnings.warn(msg, ImportWarning)
+    no_opencv = True
+
+from skimage.transform import rotate
 from ..var import frame_center
 
 
-def frame_rotate(array, angle, interpolation='bicubic', cxy=None):
+def frame_rotate(array, angle, imlib='opencv', interpolation='bicubic', cxy=None):
     """ Rotates a frame.
 
     Parameters
     ----------
     array : array_like
         Input frame, 2d array.
     angle : float
         Rotation angle.
+    imlib : {'opencv', 'skimage'}, str optional
+        Library used for image transformations. Opencv is faster than ndimage or
+        skimage.
    interpolation : {'bicubic', 'bilinear', 'nearneig'}, optional
        'nneighbor' stands for nearest-neighbor interpolation,
        'bilinear' stands for bilinear interpolation,
@@ -48,26 +61,53 @@
         cy, cx = frame_center(array)
     else:
         cx, cy = cxy
-
-    if interpolation == 'bilinear':
-        intp = cv2.INTER_LINEAR
-    elif interpolation == 'bicubic':
-        intp= cv2.INTER_CUBIC
-    elif interpolation == 'nearneig':
-        intp = cv2.INTER_NEAREST
+
+    if imlib not in ['skimage', 'opencv']:
+        raise ValueError('Imlib not recognized, try opencv or skimage')
+
+    if imlib=='skimage' or no_opencv:
+        if interpolation == 'bilinear':
+            order = 1
+        elif interpolation == 'bicubic':
+            order = 3
+        elif interpolation == 'nearneig':
+            order = 0
+        else:
+            raise TypeError('Interpolation method not recognized.')
+
+        min_val = np.min(array)
+        im_temp = array - min_val
+        max_val = np.max(im_temp)
+        im_temp /= max_val
+
+        array_out = rotate(im_temp, angle, order=order, center=cxy, cval=np.nan)
+
+        array_out *= max_val
+        array_out += min_val
+        array_out = np.nan_to_num(array_out)
+
     else:
-        raise TypeError('Interpolation method not recognized.')
-
-    M = cv2.getRotationMatrix2D((cx,cy), angle, 1)
-    array_out = cv2.warpAffine(array.astype(np.float32), M, (x, y), flags=intp)
+        if interpolation == 'bilinear':
+            intp = cv2.INTER_LINEAR
+        elif interpolation == 'bicubic':
+            intp = cv2.INTER_CUBIC
+        elif interpolation == 'nearneig':
+            intp = cv2.INTER_NEAREST
+        else:
+            raise TypeError('Interpolation method not recognized.')
+
+        M = cv2.getRotationMatrix2D((cx,cy), angle, 1)
+        array_out = cv2.warpAffine(array.astype(np.float32), M, (x, y), flags=intp)
 
     return array_out
 
 
-def cube_derotate(array, angle_list, cxy=None, nproc=1):
-    """ Rotates an ADI cube to a common north given a vector with the
-    corresponding parallactic angles for each frame of the sequence. By default
-    bicubic interpolation is used (opencv).
+def cube_derotate(array, angle_list, imlib='opencv', interpolation='bicubic',
+                  cxy=None):
+    """ Rotates a cube (3d array or image sequence) providing a vector of
+    corresponding angles. Serves for rotating an ADI sequence to a common north
+    given a vector with the corresponding parallactic angles for each frame. By
+    default bicubic interpolation is used (opencv).
 
     Parameters
     ----------
@@ -75,6 +115,16 @@
         Input 3d array, cube.
     angle_list : list
         Vector containing the parallactic angles.
+    imlib : {'opencv', 'skimage'}, str optional
+        Library used for image transformations. Opencv is usually faster than
+        ndimage or skimage.
+    interpolation : {'bicubic', 'bilinear', 'nearneig'}, optional
+        'nneighbor' stands for nearest-neighbor interpolation,
+        'bilinear' stands for bilinear interpolation,
+        'bicubic' for interpolation over 4x4 pixel neighborhood.
+        The 'bicubic' is the default. The 'nearneig' is the fastest method and
+        the 'bicubic' the slowest of the three. The 'nearneig' is the poorer
+        option for interpolation of noisy astronomical images.
     cxy : tuple of int, optional
         Coordinates X,Y of the point with respect to which the rotation will be
         performed.
By default the rotation is done with respect to the center @@ -98,8 +148,8 @@ def cube_derotate(array, angle_list, cxy=None, nproc=1): cxy = (cx, cy) for i in xrange(n_frames): - array_der[i] = frame_rotate(array[i], -angle_list[i], - interpolation='bicubic', cxy=cxy) + array_der[i] = frame_rotate(array[i], -angle_list[i], imlib=imlib, + interpolation=interpolation, cxy=cxy) return array_der diff --git a/vip/calib/recentering.py b/vip/calib/recentering.py index e31245d7..46194d69 100644 --- a/vip/calib/recentering.py +++ b/vip/calib/recentering.py @@ -17,10 +17,20 @@ 'cube_recenter_moffat2d_fit'] import numpy as np -import cv2 +import warnings import pywt import itertools as itt import pyprind + +try: + import cv2 + no_opencv = False +except ImportError: + msg = "Opencv python binding are missing (consult VIP documentation for " + msg += "Opencv installation instructions). Scipy.ndimage will be used instead." + warnings.warn(msg, ImportWarning) + no_opencv = True + from scipy.ndimage.interpolation import shift from skimage.transform import radon from multiprocessing import Pool, cpu_count @@ -33,7 +43,7 @@ get_annulus, pp_subplots, fit_2dmoffat, fit_2dgaussian) -def frame_shift(array, shift_y, shift_x, lib='opencv', interpolation='bicubic'): +def frame_shift(array, shift_y, shift_x, imlib='opencv', interpolation='bicubic'): """ Shifts an 2d array by shift_y, shift_x. Boundaries are filled with zeros. Parameters @@ -42,8 +52,9 @@ def frame_shift(array, shift_y, shift_x, lib='opencv', interpolation='bicubic'): Input 2d array. shift_y, shift_x: float Shifts in x and y directions. - lib : {'opencv', 'ndimage'}, string optional - Whether to use opencv or ndimage library. + imlib : {'opencv', 'ndimage'}, string optional + Library used for image transformations. Opencv is faster than ndimage or + skimage. interpolation : {'bicubic', 'bilinear', 'nearneig'}, optional 'nneighbor' stands for nearest-neighbor interpolation, 'bilinear' stands for bilinear interpolation, @@ -62,8 +73,11 @@ def frame_shift(array, shift_y, shift_x, lib='opencv', interpolation='bicubic'): raise TypeError ('Input array is not a frame or 2d array') image = array.copy() + + if imlib not in ['ndimage', 'opencv']: + raise ValueError('Imlib not recognized, try opencv or ndimage') - if lib=='ndimage': + if imlib=='ndimage' or no_opencv: if interpolation == 'bilinear': intp = 1 elif interpolation == 'bicubic': @@ -75,7 +89,7 @@ def frame_shift(array, shift_y, shift_x, lib='opencv', interpolation='bicubic'): array_shifted = shift(image, (shift_y, shift_x), order=intp) - elif lib=='opencv': + else: if interpolation == 'bilinear': intp = cv2.INTER_LINEAR elif interpolation == 'bicubic': @@ -89,9 +103,6 @@ def frame_shift(array, shift_y, shift_x, lib='opencv', interpolation='bicubic'): y, x = image.shape M = np.float32([[1,0,shift_x],[0,1,shift_y]]) array_shifted = cv2.warpAffine(image, M, (x,y), flags=intp) - - else: - raise ValueError('Lib not recognized, try opencv or ndimage') return array_shifted diff --git a/vip/calib/rescaling.py b/vip/calib/rescaling.py index e18db594..c3a43a37 100644 --- a/vip/calib/rescaling.py +++ b/vip/calib/rescaling.py @@ -10,19 +10,29 @@ 'cube_rescaling', 'check_scal_vector'] -import cv2 import numpy as np +import warnings +try: + import cv2 + no_opencv = False +except ImportError: + msg = "Opencv python binding are missing (consult VIP documentation for " + msg += "Opencv installation instructions). Ndimage/Scikit-image will be used " \ + "instead." 
+    warnings.warn(msg, ImportWarning)
+    no_opencv = True
+
+from skimage.transform import rescale
 from scipy.ndimage.interpolation import geometric_transform
 from ..var import frame_center
 
-
-def frame_px_resampling(array, scale, interpolation='bicubic', scale_y=None,
-                        scale_x=None):
+def frame_px_resampling(array, scale, imlib='opencv', interpolation='bicubic',
+                        scale_y=None, scale_x=None):
     """ Resamples the pixels of a frame wrt to the center, changing the size
     of the frame. If 'scale' < 1 then the frame is downsampled and if
-    'scale' > 1 then its pixels are upsampled. Uses opencv for maximum speed.
-    Several modes of interpolation are available.
+    'scale' > 1 then its pixels are upsampled. Several modes of interpolation
+    are available.
 
     Parameters
     ----------
@@ -30,6 +40,9 @@
         Input frame, 2d array.
     scale : float
         Scale factor for upsampling or downsampling the frame.
+    imlib : {'opencv', 'skimage'}, str optional
+        Library used for image transformations. Opencv is faster than ndimage or
+        skimage.
     interpolation : {'bicubic', 'bilinear', 'nearneig'}, optional
         'nneighbor' stands for nearest-neighbor interpolation,
         'bilinear' stands for bilinear interpolation,
@@ -52,28 +65,58 @@
         Output resampled frame.
     """
     if not array.ndim == 2:
-        raise TypeError('Input array is not a frame or 2d array.')
+        raise TypeError('Input array is not a frame or 2d array')
     if scale_y is None: scale_y = scale
     if scale_x is None: scale_x = scale
-
-    if interpolation == 'bilinear':
-        intp = cv2.INTER_LINEAR
-    elif interpolation == 'bicubic':
-        intp= cv2.INTER_CUBIC
-    elif interpolation == 'nearneig':
-        intp = cv2.INTER_NEAREST
+
+    if imlib not in ['skimage', 'opencv']:
+        raise ValueError('Imlib not recognized, try opencv or skimage')
+
+    if imlib == 'skimage' or no_opencv:
+        if interpolation == 'bilinear':
+            order = 1
+        elif interpolation == 'bicubic':
+            order = 3
+        elif interpolation == 'nearneig':
+            order = 0
+        else:
+            raise TypeError('Interpolation method not recognized.')
+
+        min_val = np.min(array)
+        im_temp = array - min_val
+        max_val = np.max(im_temp)
+        im_temp /= max_val
+
+        array_resc = rescale(im_temp, scale=(scale_y, scale_x), order=order)
+
+        array_resc *= max_val
+        array_resc += min_val
     else:
-        raise TypeError('Interpolation method not recognized.')
-
-    array_resc = cv2.resize(array.astype(np.float32), (0,0), fx=scale_x,
-                            fy=scale_y, interpolation=intp)
+        if interpolation == 'bilinear':
+            intp = cv2.INTER_LINEAR
+        elif interpolation == 'bicubic':
+            intp = cv2.INTER_CUBIC
+        elif interpolation == 'nearneig':
+            intp = cv2.INTER_NEAREST
+        else:
+            raise TypeError('Interpolation method not recognized.')
+
+        array_resc = cv2.resize(array.astype(np.float32), (0,0), fx=scale_x,
+                                fy=scale_y, interpolation=intp)
+
+    # TODO: For conservation of flux (e.g. in aperture 1xFWHM), what to do when
+    # there is a different scaling factor in x and y?
+    if scale_y==scale_x:
+        array_resc /= scale ** 2
+
     return array_resc
 
 
-def cube_px_resampling(array, scale, interpolation='bicubic', scale_y=None,
-                       scale_x=None):
+def cube_px_resampling(array, scale, imlib='opencv', interpolation='bicubic',
+                       scale_y=None, scale_x=None):
     """ Wrapper of frame_px_resampling() for resampling the frames of a cube with
     a single scale. Useful when we need to upsample (upscaling) or downsample
     (pixel binning) a set of frames, e.g. an ADI cube.
@@ -84,6 +127,9 @@ def cube_px_resampling(array, scale, interpolation='bicubic', scale_y=None, Input frame, 2d array. scale : float Scale factor for upsampling or downsampling the frames in the cube. + imlib : {'opencv', 'skimage'}, str optional + Library used for image transformations. Opencv is faster than ndimage or + skimage. interpolation : {'bicubic', 'bilinear', 'nearneig'}, optional 'nneighbor' stands for nearest-neighbor interpolation, 'bilinear' stands for bilinear interpolation, @@ -118,22 +164,19 @@ def cube_px_resampling(array, scale, interpolation='bicubic', scale_y=None, if scale_y is None: scale_y = scale if scale_x is None: scale_x = scale - array_resc = np.zeros((array.shape[0], int(round(array.shape[1]*scale_y)), - int(round(array.shape[2]*scale_x)))) + array_resc = [] for i in range(array.shape[0]): - array_resc[i] = frame_px_resampling(array[i], scale=scale, - interpolation=interpolation, - scale_y=scale_y, scale_x=scale_x) - array_resc[i] /= scale**2 + array_resc.append(frame_px_resampling(array[i], scale=scale, imlib=imlib, + interpolation=interpolation, + scale_y=scale_y, scale_x=scale_x)) - return array_resc + return np.array(array_resc) def frame_rescaling(array, ref_y=None, ref_x=None, scale=1.0, - method='geometric_transform', scale_y=None, scale_x=None): - """ - Function to rescale a frame by a factor 'scale', wrt a reference point + imlib='opencv', scale_y=None, scale_x=None): + """ Function to rescale a frame by a factor 'scale', wrt a reference point ref_pt which by default is the center of the frame (typically the exact location of the star). However, it keeps the same dimensions. It uses spline interpolation of order 3 to find the new values in the output array. @@ -144,12 +187,12 @@ def frame_rescaling(array, ref_y=None, ref_x=None, scale=1.0, Input frame, 2d array. ref_y, ref_x : float, optional Coordinates X,Y of the point with respect to which the rotation will be - performed. By default the rotation is done with respect to the center + performed. By default the rescaling is done with respect to the center of the frame; central pixel if frame has odd size. scale : float Scaling factor. If > 1, it will upsample the input array equally along y and x by this factor. - method: string, {'geometric_transform','cv2.warp_affine'}, optional + imlib: string, {'ndimage','opencv'}, optional String determining which library to apply to rescale. Both options use a spline of order 3 for interpolation. Opencv is a few order of magnitudes faster than ndimage. 
@@ -182,19 +225,26 @@ def _scale_func(output_coords,ref_y=0,ref_x=0, scaling=1.0, scaling_y=None, scaling_x = scaling return (ref_y+((output_coords[0]-ref_y)/scaling_y), ref_x+((output_coords[1]-ref_x)/scaling_x)) + #--------------------------------------------------------------------------- + if not array.ndim == 2: + raise TypeError('Input array is not a frame or 2d array.') - array_out = np.zeros_like(array) - - if not ref_y and not ref_x: ref_y, ref_x = frame_center(array) + if not ref_y and not ref_x: + ref_y, ref_x = frame_center(array) + + if imlib not in ['ndimage', 'opencv']: + raise ValueError('Imlib not recognized, try opencv or ndimage') - if method == 'geometric_transform': - geometric_transform(array, _scale_func, output_shape=array.shape, - output = array_out, - extra_keywords={'ref_y':ref_y,'ref_x':ref_x, - 'scaling':scale,'scaling_y':scale_y, - 'scaling_x':scale_x}) + if imlib == 'ndimage' or no_opencv: + outshap = array.shape + array_out = geometric_transform(array, _scale_func, output_shape=outshap, + extra_keywords={'ref_y':ref_y, + 'ref_x':ref_x, + 'scaling':scale, + 'scaling_y':scale_y, + 'scaling_x':scale_x}) - elif method == 'cv2.warp_affine': + else: if scale_y is None: scale_y = scale if scale_x is None: scale_x = scale M = np.array([[scale_x,0,(1.-scale_x)*ref_x], @@ -203,18 +253,18 @@ def _scale_func(output_coords,ref_y=0,ref_x=0, scaling=1.0, scaling_y=None, (array.shape[1], array.shape[0]), flags=cv2.INTER_CUBIC) - else: - msg="Pick a valid method: 'geometric_transform' or 'cv2.warp_affine'" - raise ValueError(msg) + # TODO: For conservation of flux (e.g. in aperture 1xFWHM), what to do when + # there is a different scaling factor in x and y? + if scale_y==scale_x: + array_out /= scale ** 2 return array_out def cube_rescaling(array, scaling_list, ref_y=None, ref_x=None, - method='cv2.warp_affine',scaling_y=None, scaling_x=None): - """ - Function to rescale a cube, frame by frame, by a factor 'scale', with + imlib='opencv', scaling_y=None, scaling_x=None): + """ Function to rescale a cube, frame by frame, by a factor 'scale', with respect to position (cy,cx). It calls frame_rescaling function. Parameters @@ -227,7 +277,7 @@ def cube_rescaling(array, scaling_list, ref_y=None, ref_x=None, Coordinates X,Y of the point with respect to which the rescaling will be performed. By default the rescaling is done with respect to the center of the frames; central pixel if the frames have odd size. - method: {'cv2.warp_affine', 'geometric_transform'}, optional + imlib: {'opencv', 'ndimage'}, optional String determining which library to apply to rescale. Both options use a spline of order 3 for interpolation. Opencv is a few order of magnitudes faster than ndimage. @@ -246,18 +296,18 @@ def cube_rescaling(array, scaling_list, ref_y=None, ref_x=None, Median combined image of the rescaled cube. 
""" if not array.ndim == 3: - raise TypeError('Input array is not a cube or 3d array.') + raise TypeError('Input array is not a cube or 3d array') + array_sc = np.zeros_like(array) if scaling_y is None: scaling_y = [None]*array.shape[0] if scaling_x is None: scaling_x = [None]*array.shape[0] if not ref_y and not ref_x: ref_y, ref_x = frame_center(array[0]) - for i in xrange(array.shape[0]): + for i in range(array.shape[0]): array_sc[i] = frame_rescaling(array[i], ref_y=ref_y, ref_x=ref_x, - scale=scaling_list[i], method=method, - scale_y=scaling_y[i], - scale_x=scaling_x[i]) + scale=scaling_list[i], imlib=imlib, + scale_y=scaling_y[i], scale_x=scaling_x[i]) array_out = np.median(array_sc, axis=0) return array_sc, array_out diff --git a/vip/pca/utils.py b/vip/pca/utils.py index b3dab112..4b0f410d 100644 --- a/vip/pca/utils.py +++ b/vip/pca/utils.py @@ -15,7 +15,6 @@ 'pca_annulus', 'scale_cube_for_pca'] -import cv2 import numpy as np from numpy import linalg from matplotlib import pyplot as plt