from __future__ import division, print_function

import os
import sys
import pickle
import copy
import warnings
from os.path import join
from numpy.distutils import log
from distutils.dep_util import newer
from distutils.sysconfig import get_config_var
from numpy._build_utils.apple_accelerate import (
    uses_accelerate_framework, get_sgemv_fix
    )
from numpy.compat import npy_load_module
from setup_common import *

# Set to True to enable relaxed strides checking. This (mostly) means
# that `strides[dim]` is ignored if `shape[dim] == 1` when setting flags.
NPY_RELAXED_STRIDES_CHECKING = (os.environ.get('NPY_RELAXED_STRIDES_CHECKING', "0") != "0")

# XXX: ugly, we use a class to avoid calling some expensive functions twice
# when generating config.h/numpyconfig.h. I don't see a better way because
# distutils forces config.h generation inside an Extension class, and as such
# sharing configuration information between extensions is not easy.
# Using a pickle-based memoize does not work because config_cmd is an instance
# method, which cPickle does not like.
#
# Use pickle in all cases, as cPickle is gone in python3 and the difference
# in time only matters for the build. -- Charles Harris, 2013-03-30

class CallOnceOnly(object):
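    """Run the expensive configuration checks (types, IEEE macros, complex
    support) only once per build.

    The results are stored pickled; subsequent calls return an independent
    copy of the cached result.
    """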
    def __init__(self):
        self._check_types = None
        self._check_ieee_macros = None
        self._check_complex = None

    def check_types(self, *a, **kw):
        if self._check_types is None:
            out = check_types(*a, **kw)
            self._check_types = pickle.dumps(out)
        else:
            out = copy.deepcopy(pickle.loads(self._check_types))
        return out

    def check_ieee_macros(self, *a, **kw):
        if self._check_ieee_macros is None:
            out = check_ieee_macros(*a, **kw)
            self._check_ieee_macros = pickle.dumps(out)
        else:
            out = copy.deepcopy(pickle.loads(self._check_ieee_macros))
        return out

    def check_complex(self, *a, **kw):
        if self._check_complex is None:
            out = check_complex(*a, **kw)
            self._check_complex = pickle.dumps(out)
        else:
            out = copy.deepcopy(pickle.loads(self._check_complex))
        return out

PYTHON_HAS_UNICODE_WIDE = True

def pythonlib_dir():
    """return path where libpython* is."""
    if sys.platform == 'win32':
        return os.path.join(sys.prefix, "libs")
    else:
        return get_config_var('LIBDIR')

def is_npy_no_signal():
    """Return True if the NPY_NO_SIGNAL symbol must be defined in configuration
    header."""
    return sys.platform == 'win32'

def is_npy_no_smp():
    """Return True if the NPY_NO_SMP symbol must be defined in public
    header (when SMP support cannot be reliably enabled)."""
    # Perhaps a fancier check is in order here, so that threads are only
    # enabled if there are actually multiple CPUs?  But threaded code can be
    # nice even on a single CPU, so that long-running calculations don't
    # block.
    return 'NPY_NOSMP' in os.environ

def win32_checks(deflist):
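    """Append the Windows-specific defines for the current build
    architecture to ``deflist``."""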
    from numpy.distutils.misc_util import get_build_architecture
    a = get_build_architecture()

    # Distutils hack on AMD64 on windows
    print('BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' %
          (a, os.name, sys.platform))
    if a == 'AMD64':
        deflist.append('DISTUTILS_USE_SDK')

    # On win32, force the long double format string to be 'g', not
    # 'Lg', since the MS runtime does not support a long double whose
    # size is greater than sizeof(double).
    if a == "Intel" or a == "AMD64":
        deflist.append('FORCE_NO_LONG_DOUBLE_FORMATTING')

def check_math_capabilities(config, moredefs, mathlibs):
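    """Check which optional math functions, headers, intrinsics and
    attributes are available and append the corresponding defines to
    ``moredefs``."""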
    def check_func(func_name):
        return config.check_func(func_name, libraries=mathlibs,
                                 decl=True, call=True)

    def check_funcs_once(funcs_name):
        decl = dict([(f, True) for f in funcs_name])
        st = config.check_funcs_once(funcs_name, libraries=mathlibs,
                                     decl=decl, call=decl)
        if st:
            moredefs.extend([(fname2def(f), 1) for f in funcs_name])
        return st

    def check_funcs(funcs_name):
        # Use check_funcs_once first; if that fails, test each function
        # individually. Return success only if all the functions are available.
        if not check_funcs_once(funcs_name):
            # Global check failed, check func per func
            for f in funcs_name:
                if check_func(f):
                    moredefs.append((fname2def(f), 1))
            return 0
        else:
            return 1

    #use_msvc = config.check_decl("_MSC_VER")

    if not check_funcs_once(MANDATORY_FUNCS):
        raise SystemError("One of the required function to build numpy is not"
                " available (the list is %s)." % str(MANDATORY_FUNCS))

    # Standard functions which may not be available and for which we have a
    # replacement implementation. Note that some of these are C99 functions.

    # XXX: hack to circumvent cpp pollution from python: python puts its
    # config.h in the public namespace, so we have a clash for the common
    # functions we test. We remove every function tested by python's
    # autoconf, hoping their own tests are correct.
    for f in OPTIONAL_STDFUNCS_MAYBE:
        if config.check_decl(fname2def(f),
                    headers=["Python.h", "math.h"]):
            OPTIONAL_STDFUNCS.remove(f)

    check_funcs(OPTIONAL_STDFUNCS)

    for h in OPTIONAL_HEADERS:
        if config.check_func("", decl=False, call=False, headers=[h]):
            moredefs.append((fname2def(h).replace(".", "_"), 1))

    for tup in OPTIONAL_INTRINSICS:
        headers = None
        if len(tup) == 2:
            f, args = tup
        else:
            f, args, headers = tup[0], tup[1], [tup[2]]
        if config.check_func(f, decl=False, call=True, call_args=args,
                             headers=headers):
            moredefs.append((fname2def(f), 1))

    for dec, fn in OPTIONAL_FUNCTION_ATTRIBUTES:
        if config.check_gcc_function_attribute(dec, fn):
            moredefs.append((fname2def(fn), 1))

    for fn in OPTIONAL_VARIABLE_ATTRIBUTES:
        if config.check_gcc_variable_attribute(fn):
            m = fn.replace("(", "_").replace(")", "_")
            moredefs.append((fname2def(m), 1))

    # C99 functions: float and long double versions
    check_funcs(C99_FUNCS_SINGLE)
    check_funcs(C99_FUNCS_EXTENDED)

def check_complex(config, mathlibs):
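    """Check C99 complex support (complex.h, the complex types and the
    per-precision complex functions).

    Returns a (private_defines, public_defines) pair.
    """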
    priv = []
    pub = []

    try:
        if os.uname()[0] == "Interix":
            warnings.warn("Disabling broken complex support. See #1365")
            return priv, pub
    except:
        # os.uname is not available on all platforms; a blanket except is ugly but safe here
        pass

    # Check for complex support
    st = config.check_header('complex.h')
    if st:
        priv.append(('HAVE_COMPLEX_H', 1))
        pub.append(('NPY_USE_C99_COMPLEX', 1))

        for t in C99_COMPLEX_TYPES:
            st = config.check_type(t, headers=["complex.h"])
            if st:
                pub.append(('NPY_HAVE_%s' % type2def(t), 1))

        def check_prec(prec):
            flist = [f + prec for f in C99_COMPLEX_FUNCS]
            decl = dict([(f, True) for f in flist])
            if not config.check_funcs_once(flist, call=decl, decl=decl,
                                           libraries=mathlibs):
                for f in flist:
                    if config.check_func(f, call=True, decl=True,
                                         libraries=mathlibs):
                        priv.append((fname2def(f), 1))
            else:
                priv.extend([(fname2def(f), 1) for f in flist])

        check_prec('')
        check_prec('f')
        check_prec('l')

    return priv, pub

def check_ieee_macros(config):
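    """Check how the IEEE macros (isnan, isinf, signbit, isfinite) are
    declared and return a (private_defines, public_defines) pair."""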
    priv = []
    pub = []

    macros = []

    def _add_decl(f):
        priv.append(fname2def("decl_%s" % f))
        pub.append('NPY_%s' % fname2def("decl_%s" % f))

    # XXX: hack to circumvent cpp pollution from python: python puts its
    # config.h in the public namespace, so we have a clash for the common
    # functions we test. We remove every function tested by python's
    # autoconf, hoping their own tests are correct.
    _macros = ["isnan", "isinf", "signbit", "isfinite"]
    for f in _macros:
        py_symbol = fname2def("decl_%s" % f)
        already_declared = config.check_decl(py_symbol,
                headers=["Python.h", "math.h"])
        if already_declared:
            if config.check_macro_true(py_symbol,
                    headers=["Python.h", "math.h"]):
                pub.append('NPY_%s' % fname2def("decl_%s" % f))
        else:
            macros.append(f)
    # Normally, isnan and isinf are macros (C99), but some platforms only have
    # the function, or both the function and the macro. Check for the macro
    # only, and define replacement ones if not found.
    # Note: including Python.h is necessary because it modifies some math.h
    # definitions
    for f in macros:
        st = config.check_decl(f, headers=["Python.h", "math.h"])
        if st:
            _add_decl(f)

    return priv, pub

def check_types(config_cmd, ext, build_dir):
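    """Check the sizes of the basic C and Python types and return a
    (private_defines, public_defines) pair."""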
    private_defines = []
    public_defines = []

    # Expected size (in number of bytes) for each type. This is an
    # optimization: those are only hints, and an exhaustive search for the size
    # is done if the hints are wrong.
    expected = {'short': [2], 'int': [4], 'long': [8, 4],
                'float': [4], 'double': [8], 'long double': [16, 12, 8],
                'Py_intptr_t': [8, 4], 'PY_LONG_LONG': [8], 'long long': [8],
                'off_t': [8, 4]}

    # Check we have the python header (-dev* packages on Linux)
    result = config_cmd.check_header('Python.h')
    if not result:
        raise SystemError(
                "Cannot compile 'Python.h'. Perhaps you need to "
                "install python-dev|python-devel.")
    res = config_cmd.check_header("endian.h")
    if res:
        private_defines.append(('HAVE_ENDIAN_H', 1))
        public_defines.append(('NPY_HAVE_ENDIAN_H', 1))

    # Check basic types sizes
    for type in ('short', 'int', 'long'):
        res = config_cmd.check_decl("SIZEOF_%s" % sym2def(type), headers=["Python.h"])
        if res:
            public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), "SIZEOF_%s" % sym2def(type)))
        else:
            res = config_cmd.check_type_size(type, expected=expected[type])
            if res >= 0:
                public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
            else:
                raise SystemError("Checking sizeof (%s) failed !" % type)

    for type in ('float', 'double', 'long double'):
        already_declared = config_cmd.check_decl("SIZEOF_%s" % sym2def(type),
                                                 headers=["Python.h"])
        res = config_cmd.check_type_size(type, expected=expected[type])
        if res >= 0:
            public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
            if not already_declared and type != 'long double':
                private_defines.append(('SIZEOF_%s' % sym2def(type), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % type)

        # Compute size of corresponding complex type: used to check that our
        # definition is binary compatible with C99 complex type (check done at
        # build time in npy_common.h)
        complex_def = "struct {%s __x; %s __y;}" % (type, type)
        res = config_cmd.check_type_size(complex_def,
                                         expected=[2 * x for x in expected[type]])
        if res >= 0:
            public_defines.append(('NPY_SIZEOF_COMPLEX_%s' % sym2def(type), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % complex_def)

    for type in ('Py_intptr_t', 'off_t'):
        res = config_cmd.check_type_size(type, headers=["Python.h"],
                library_dirs=[pythonlib_dir()],
                expected=expected[type])

        if res >= 0:
            private_defines.append(('SIZEOF_%s' % sym2def(type), '%d' % res))
            public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % type)

    # We check declaration AND type because that's how distutils does it.
    if config_cmd.check_decl('PY_LONG_LONG', headers=['Python.h']):
        res = config_cmd.check_type_size('PY_LONG_LONG',  headers=['Python.h'],
                library_dirs=[pythonlib_dir()],
                expected=expected['PY_LONG_LONG'])
        if res >= 0:
            private_defines.append(('SIZEOF_%s' % sym2def('PY_LONG_LONG'), '%d' % res))
            public_defines.append(('NPY_SIZEOF_%s' % sym2def('PY_LONG_LONG'), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % 'PY_LONG_LONG')

        res = config_cmd.check_type_size('long long',
                expected=expected['long long'])
        if res >= 0:
            #private_defines.append(('SIZEOF_%s' % sym2def('long long'), '%d' % res))
            public_defines.append(('NPY_SIZEOF_%s' % sym2def('long long'), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % 'long long')

    if not config_cmd.check_decl('CHAR_BIT', headers=['Python.h']):
        raise RuntimeError(
            "Config wo CHAR_BIT is not supported"
            ", please contact the maintainers")

    return private_defines, public_defines

def check_mathlib(config_cmd):
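    """Return the list of libraries needed to link against the C math
    library (possibly empty).  The MATHLIB environment variable, if set,
    is tried first."""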
    # Testing the C math library
    mathlibs = []
    mathlibs_choices = [[], ['m'], ['cpml']]
    mathlib = os.environ.get('MATHLIB')
    if mathlib:
        mathlibs_choices.insert(0, mathlib.split(','))
    for libs in mathlibs_choices:
        if config_cmd.check_func("exp", libraries=libs, decl=True, call=True):
            mathlibs = libs
            break
    else:
        raise EnvironmentError("math library missing; rerun "
                               "setup.py after setting the "
                               "MATHLIB env variable")
    return mathlibs

def visibility_define(config):
    """Return the define value to use for NPY_VISIBILITY_HIDDEN (may be empty
    string)."""
    if config.check_compiler_gcc4():
        return '__attribute__((visibility("hidden")))'
    else:
        return ''

def configuration(parent_package='',top_path=None):
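    """Return the distutils configuration for the numpy.core subpackage."""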
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = npy_load_module('_'.join(n.split('.')),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
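        """Generate the private config.h header in build_dir and make sure
        the math libraries end up on the extension's link line."""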
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
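            # (note: this only rebinds a local name; the module-level
            # PYTHON_HAS_UNICODE_WIDE default is not updated here)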
            if not config_cmd.check_decl('Py_UNICODE_WIDE', headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                           'INTEL_EXTENDED_16_BYTES_LE',
                           'MOTOROLA_EXTENDED_12_BYTES_BE',
                           'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
                           'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                           'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE']:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attribute (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put private include directory in build_dir on search path
        # allows using code generation in headers headers
        config.add_include_dirs(join(build_dir, "src", "private"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
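        """Return a source-generating callback that runs the code generator
        script ``<module_name>.py`` from the code_generators directory."""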
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
    config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
    config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            join(codegen_dir, 'genapi.py'),
            ]

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clibs are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    config.add_extension('_dummy',
                         sources=[join('src', 'dummymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api]
                         )

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during the npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError("Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [join('src', 'npymath', 'npy_math.c.src'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')
                       ]
    config.add_installed_library('npymath',
            sources=npymath_sources + [get_mathlib_info],
            install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
            subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
            subst_dict)

    #######################################################################
    #                         npysort library                             #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [join('src', 'npysort', 'quicksort.c.src'),
                       join('src', 'npysort', 'mergesort.c.src'),
                       join('src', 'npysort', 'heapsort.c.src'),
                       join('src', 'private', 'npy_partition.h.src'),
                       join('src', 'npysort', 'selection.c.src'),
                       join('src', 'private', 'npy_binsearch.h.src'),
                       join('src', 'npysort', 'binsearch.c.src'),
                       ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])

    #######################################################################
    #                        multiarray module                            #
    #######################################################################

    # Multiarray version: this function is needed to build foo.c from
    # foo.c.src when foo.c is included in another file and is therefore not in
    # the src argument of the build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [join(local_dir, subpath, 'scalartypes.c.src'),
                   join(local_dir, subpath, 'arraytypes.c.src'),
                   join(local_dir, subpath, 'nditer_templ.c.src'),
                   join(local_dir, subpath, 'lowlevel_strided_loops.c.src'),
                   join(local_dir, subpath, 'einsum.c.src'),
                   join(local_dir, 'src', 'private', 'templ_common.h.src')
                   ]

        # numpy.distutils generates .c files from .c.src in build-specific
        # directories; we have to add those directories to the include path
        # as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    multiarray_deps = [
            join('src', 'multiarray', 'arrayobject.h'),
            join('src', 'multiarray', 'arraytypes.h'),
            join('src', 'multiarray', 'array_assign.h'),
            join('src', 'multiarray', 'buffer.h'),
            join('src', 'multiarray', 'calculation.h'),
            join('src', 'multiarray', 'cblasfuncs.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'convert_datatype.h'),
            join('src', 'multiarray', 'convert.h'),
            join('src', 'multiarray', 'conversion_utils.h'),
            join('src', 'multiarray', 'ctors.h'),
            join('src', 'multiarray', 'descriptor.h'),
            join('src', 'multiarray', 'getset.h'),
            join('src', 'multiarray', 'hashdescr.h'),
            join('src', 'multiarray', 'iterators.h'),
            join('src', 'multiarray', 'mapping.h'),
            join('src', 'multiarray', 'methods.h'),
            join('src', 'multiarray', 'multiarraymodule.h'),
            join('src', 'multiarray', 'nditer_impl.h'),
            join('src', 'multiarray', 'numpymemoryview.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'multiarray', 'numpyos.h'),
            join('src', 'multiarray', 'refcount.h'),
            join('src', 'multiarray', 'scalartypes.h'),
            join('src', 'multiarray', 'sequence.h'),
            join('src', 'multiarray', 'shape.h'),
            join('src', 'multiarray', 'ucsnarrow.h'),
            join('src', 'multiarray', 'usertypes.h'),
            join('src', 'multiarray', 'vdot.h'),
            join('src', 'private', 'npy_config.h'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'private', 'lowlevel_strided_loops.h'),
            join('src', 'private', 'mem_overlap.h'),
            join('src', 'private', 'npy_extint128.h'),
            join('include', 'numpy', 'arrayobject.h'),
            join('include', 'numpy', '_neighborhood_iterator_imp.h'),
            join('include', 'numpy', 'npy_endian.h'),
            join('include', 'numpy', 'arrayscalars.h'),
            join('include', 'numpy', 'noprefix.h'),
            join('include', 'numpy', 'npy_interrupt.h'),
            join('include', 'numpy', 'npy_3kcompat.h'),
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('include', 'numpy', 'npy_common.h'),
            join('include', 'numpy', 'npy_os.h'),
            join('include', 'numpy', 'utils.h'),
            join('include', 'numpy', 'ndarrayobject.h'),
            join('include', 'numpy', 'npy_cpu.h'),
            join('include', 'numpy', 'numpyconfig.h'),
            join('include', 'numpy', 'ndarraytypes.h'),
            join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
            join('include', 'numpy', '_numpyconfig.h.in'),
            # add library sources as distutils does not consider library
            # dependencies
            ] + npysort_sources + npymath_sources

    multiarray_src = [
            join('src', 'multiarray', 'alloc.c'),
            join('src', 'multiarray', 'arrayobject.c'),
            join('src', 'multiarray', 'arraytypes.c.src'),
            join('src', 'multiarray', 'array_assign.c'),
            join('src', 'multiarray', 'array_assign_scalar.c'),
            join('src', 'multiarray', 'array_assign_array.c'),
            join('src', 'multiarray', 'buffer.c'),
            join('src', 'multiarray', 'calculation.c'),
            join('src', 'multiarray', 'compiled_base.c'),
            join('src', 'multiarray', 'common.c'),
            join('src', 'multiarray', 'convert.c'),
            join('src', 'multiarray', 'convert_datatype.c'),
            join('src', 'multiarray', 'conversion_utils.c'),
            join('src', 'multiarray', 'ctors.c'),
            join('src', 'multiarray', 'datetime.c'),
            join('src', 'multiarray', 'datetime_strings.c'),
            join('src', 'multiarray', 'datetime_busday.c'),
            join('src', 'multiarray', 'datetime_busdaycal.c'),
            join('src', 'multiarray', 'descriptor.c'),
            join('src', 'multiarray', 'dtype_transfer.c'),
            join('src', 'multiarray', 'einsum.c.src'),
            join('src', 'multiarray', 'flagsobject.c'),
            join('src', 'multiarray', 'getset.c'),
            join('src', 'multiarray', 'hashdescr.c'),
            join('src', 'multiarray', 'item_selection.c'),
            join('src', 'multiarray', 'iterators.c'),
            join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
            join('src', 'multiarray', 'mapping.c'),
            join('src', 'multiarray', 'methods.c'),
            join('src', 'multiarray', 'multiarraymodule.c'),
            join('src', 'multiarray', 'nditer_templ.c.src'),
            join('src', 'multiarray', 'nditer_api.c'),
            join('src', 'multiarray', 'nditer_constr.c'),
            join('src', 'multiarray', 'nditer_pywrap.c'),
            join('src', 'multiarray', 'number.c'),
            join('src', 'multiarray', 'numpymemoryview.c'),
            join('src', 'multiarray', 'numpyos.c'),
            join('src', 'multiarray', 'refcount.c'),
            join('src', 'multiarray', 'sequence.c'),
            join('src', 'multiarray', 'shape.c'),
            join('src', 'multiarray', 'scalarapi.c'),
            join('src', 'multiarray', 'scalartypes.c.src'),
            join('src', 'multiarray', 'usertypes.c'),
            join('src', 'multiarray', 'ucsnarrow.c'),
            join('src', 'multiarray', 'vdot.c'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'private', 'mem_overlap.c'),
            ]

    blas_info = get_info('blas_opt', 0)
    if blas_info and ('HAVE_CBLAS', None) in blas_info.get('define_macros', []):
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        multiarray_src.extend([join('src', 'multiarray', 'cblasfuncs.c'),
                               join('src', 'multiarray', 'python_xerbla.c'),
                               ])
        if uses_accelerate_framework(blas_info):
            multiarray_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    config.add_extension('multiarray',
                         sources=multiarray_src +
                                 [generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  join(codegen_dir, 'generate_numpy_api.py'),
                                  join('*.py')],
                         depends=deps + multiarray_deps,
                         libraries=['npymath', 'npysort'],
                         extra_info=extra_info)

    #######################################################################
    #                           umath module                              #
    #######################################################################

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and is therefore not in the src
    # argument of the build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        sources = [
            join(local_dir, subpath, 'loops.h.src'),
            join(local_dir, subpath, 'loops.c.src'),
            join(local_dir, subpath, 'scalarmath.c.src'),
            join(local_dir, subpath, 'simd.inc.src')]

        # numpy.distutils generates .c files from .c.src in build-specific
        # directories; we have to add those directories to the include path
        # as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    def generate_umath_c(ext, build_dir):
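        """Generate __umath_generated.c from generate_umath.py whenever the
        generator script is newer than the generated file."""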
        target = join(build_dir, header_dir, '__umath_generated.c')
        dirname = os.path.dirname(target)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    umath_src = [
            join('src', 'umath', 'umathmodule.c'),
            join('src', 'umath', 'reduction.c'),
            join('src', 'umath', 'funcs.inc.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'loops.h.src'),
            join('src', 'umath', 'loops.c.src'),
            join('src', 'umath', 'ufunc_object.c'),
            join('src', 'umath', 'scalarmath.c.src'),
            join('src', 'umath', 'ufunc_type_resolution.c')]

    umath_deps = [
            generate_umath_py,
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'umath', 'simd.inc.src'),
            join(codegen_dir, 'generate_ufunc_api.py'),
            join('src', 'private', 'ufunc_override.h')] + npymath_sources

    config.add_extension('umath',
                         sources=umath_src +
                                 [generate_config_h,
                                 generate_numpyconfig_h,
                                 generate_umath_c,
                                 generate_ufunc_api],
                         depends=deps + umath_deps,
                         libraries=['npymath'],
                         )

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('umath_tests',
                    sources=[join('src', 'umath', 'umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension('test_rational',
                    sources=[join('src', 'umath', 'test_rational.c.src')])

    #######################################################################
    #                        struct_ufunc_test module                     #
    #######################################################################

    config.add_extension('struct_ufunc_test',
                    sources=[join('src', 'umath', 'struct_ufunc_test.c.src')])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('multiarray_tests',
                    sources=[join('src', 'multiarray', 'multiarray_tests.c.src'),
                             join('src', 'private', 'mem_overlap.c')],
                    depends=[join('src', 'private', 'mem_overlap.h'),
                             join('src', 'private', 'npy_extint128.h')])

    #######################################################################
    #                        operand_flag_tests module                    #
    #######################################################################

    config.add_extension('operand_flag_tests',
                    sources=[join('src', 'umath', 'operand_flag_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(configuration=configuration)