Static code analysis and corrections
This commit is contained in:
@@ -0,0 +1,21 @@
|
||||
import numpy as np
|
||||
from ._fortran import *
|
||||
from scipy._lib._version import NumpyVersion
|
||||
|
||||
|
||||
# Don't use deprecated Numpy C API. Define this to a fixed version instead of
|
||||
# NPY_API_VERSION in order not to break compilation for released Scipy versions
|
||||
# when Numpy introduces a new deprecation. Use in setup.py::
|
||||
#
|
||||
# config.add_extension('_name', sources=['source_fname'], **numpy_nodepr_api)
|
||||
#
|
||||
# Build a kwargs dict for extension modules that opts out of the deprecated
# NumPy C API (pinned to the 1.9 API level so new NumPy deprecations don't
# break already-released versions).  NumPy < 1.10 does not understand the
# macro, so fall back to an empty dict there.
if NumpyVersion(np.__version__) >= '1.10.0.dev':
    numpy_nodepr_api = {
        'define_macros': [("NPY_NO_DEPRECATED_API", "NPY_1_9_API_VERSION")],
    }
else:
    numpy_nodepr_api = {}
|
||||
|
||||
|
||||
from scipy._lib._testutils import PytestTester

# Expose the conventional ``<package>.test()`` entry point for running the
# package's test suite via pytest.
test = PytestTester(__name__)
del PytestTester  # keep the class itself out of the public namespace
|
||||
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
@@ -0,0 +1,124 @@
|
||||
import re
|
||||
import os
|
||||
import glob
|
||||
from distutils.dep_util import newer
|
||||
|
||||
|
||||
__all__ = ['needs_g77_abi_wrapper', 'split_fortran_files',
|
||||
'get_g77_abi_wrappers']
|
||||
|
||||
|
||||
def uses_mkl(info):
    """Return True if the build ``info`` dict links against Intel MKL.

    Parameters
    ----------
    info : dict
        A ``numpy.distutils.system_info``-style info dict.  Only the
        ``'libraries'`` entry (a list of library name strings) is inspected.

    Returns
    -------
    bool
        True if any library name contains the substring ``'mkl'``.
    """
    # Bug fix: the previous default was '' (a string).  That only worked by
    # accident because iterating '' yields nothing; the correct default for
    # a missing 'libraries' entry is an empty list.  A plain substring test
    # replaces the equivalent re.search("mkl", ...) call.
    return any('mkl' in library for library in info.get('libraries', []))
|
||||
|
||||
|
||||
def needs_g77_abi_wrapper(info):
    """Returns True if g77 ABI wrapper must be used."""
    # MKL is the only backend (in this file's view) that requires the
    # g77-style Fortran ABI wrappers, so delegate entirely to uses_mkl().
    return uses_mkl(info)
|
||||
|
||||
|
||||
def get_g77_abi_wrappers(info):
    """
    Returns file names of source files containing Fortran ABI wrapper
    routines.
    """
    # All wrapper sources live in the 'src' directory next to this module.
    src_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src')
    if needs_g77_abi_wrapper(info):
        basenames = ['wrap_g77_abi_f.f', 'wrap_g77_abi_c.c']
    else:
        basenames = ['wrap_dummy_g77_abi.f']
    return [os.path.join(src_dir, name) for name in basenames]
|
||||
|
||||
|
||||
def split_fortran_files(source_dir, subroutines=None):
    """Split each file in `source_dir` into separate files per subroutine.

    Parameters
    ----------
    source_dir : str
        Full path to directory in which sources to be split are located.
    subroutines : list of str, optional
        Subroutines to split. (Default: all)

    Returns
    -------
    fnames : list of str
        List of file names (not including any path) that were created
        in `source_dir`.

    Notes
    -----
    This function is useful for code that can't be compiled with g77 because of
    type casting errors which do work with gfortran.

    Created files are named: ``original_name + '_subr_i' + '.f'``, with ``i``
    starting at zero and ending at ``num_subroutines_in_file - 1``.

    """

    # Normalize requested subroutine names to lower case for the
    # case-insensitive comparison below.
    if subroutines is not None:
        subroutines = [x.lower() for x in subroutines]

    def split_file(fname):
        # Split one Fortran source file; returns the list of file names
        # holding its contents (the file itself if no split was needed).
        with open(fname, 'rb') as f:
            lines = f.readlines()
            subs = []
            # need_split_next tracks whether the *previous* subroutine was
            # wanted: a wanted chunk must also be terminated at the start of
            # the next subroutine, even if that one is not wanted itself.
            need_split_next = True

            # find lines with SUBROUTINE statements
            for ix, line in enumerate(lines):
                m = re.match(b'^\\s+subroutine\\s+([a-z0-9_]+)\\s*\\(', line, re.I)
                # line[0] is an int on Python 3; ``int in bytes`` tests by
                # byte value, skipping fixed-form Fortran comment lines.
                if m and line[0] not in b'Cc!*':
                    if subroutines is not None:
                        subr_name = m.group(1).decode('ascii').lower()
                        subr_wanted = (subr_name in subroutines)
                    else:
                        subr_wanted = True
                    if subr_wanted or need_split_next:
                        need_split_next = subr_wanted
                        subs.append(ix)

            # check if no split needed
            if len(subs) <= 1:
                return [fname]

            # write out one file per subroutine
            new_fnames = []
            num_files = len(subs)
            for nfile in range(num_files):
                new_fname = fname[:-2] + '_subr_' + str(nfile) + '.f'
                new_fnames.append(new_fname)
                # Skip rewriting when the split file is already newer than
                # the source (distutils.dep_util.newer mtime check).
                if not newer(fname, new_fname):
                    continue
                with open(new_fname, 'wb') as fn:
                    if nfile + 1 == num_files:
                        # Last chunk runs to end of file.
                        fn.writelines(lines[subs[nfile]:])
                    else:
                        fn.writelines(lines[subs[nfile]:subs[nfile+1]])

        return new_fnames

    # Ignore files that are themselves outputs of a previous split.
    exclude_pattern = re.compile('_subr_[0-9]')
    source_fnames = [f for f in sorted(glob.glob(os.path.join(source_dir, '*.f')))
                     if not exclude_pattern.search(os.path.basename(f))]
    fnames = []
    for source_fname in source_fnames:
        created_files = split_file(source_fname)
        if created_files is not None:
            for cfile in created_files:
                fnames.append(os.path.basename(cfile))

    return fnames
|
||||
@@ -0,0 +1,167 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import warnings
|
||||
|
||||
import numpy as np
|
||||
import numpy.distutils.system_info
|
||||
|
||||
from numpy.distutils.system_info import (system_info,
|
||||
numpy_info,
|
||||
NotFoundError,
|
||||
BlasNotFoundError,
|
||||
LapackNotFoundError,
|
||||
AtlasNotFoundError,
|
||||
LapackSrcNotFoundError,
|
||||
BlasSrcNotFoundError,
|
||||
dict_append,
|
||||
get_info as old_get_info)
|
||||
|
||||
from scipy._lib._version import NumpyVersion
|
||||
|
||||
|
||||
if NumpyVersion(np.__version__) >= "1.15.0.dev":
    # For new enough numpy.distutils, the ACCELERATE=None environment
    # variable in the top-level setup.py is enough, so no need to
    # customize BLAS detection.
    get_info = old_get_info
else:
    # For numpy < 1.15.0, we need overrides.

    def get_info(name, notfound_action=0):
        """Drop-in replacement for ``numpy.distutils.system_info.get_info``
        that routes the 'lapack_opt'/'blas_opt' names to the override
        classes defined below; everything else is delegated unchanged."""
        # Special case our custom *_opt_info
        cls = {'lapack_opt': lapack_opt_info,
               'blas_opt': blas_opt_info}.get(name.lower())
        if cls is None:
            return old_get_info(name, notfound_action)
        return cls().get_info(notfound_action)
|
||||
|
||||
#
|
||||
# The following is copypaste from numpy.distutils.system_info, with
|
||||
# OSX Accelerate-related parts removed.
|
||||
#
|
||||
|
||||
class lapack_opt_info(system_info):
    # Copy of numpy.distutils' optimized-LAPACK detection with the OSX
    # Accelerate-related parts removed (see module comment above this class).

    notfounderror = LapackNotFoundError

    def calc_info(self):
        """Probe LAPACK backends in preference order (MKL, OpenBLAS, ATLAS,
        then generic LAPACK/BLAS sources) and record the first hit via
        ``set_info``."""

        lapack_mkl_info = get_info('lapack_mkl')
        if lapack_mkl_info:
            self.set_info(**lapack_mkl_info)
            return

        openblas_info = get_info('openblas_lapack')
        if openblas_info:
            self.set_info(**openblas_info)
            return

        openblas_info = get_info('openblas_clapack')
        if openblas_info:
            self.set_info(**openblas_info)
            return

        # ATLAS variants, newest/threaded flavors first.
        atlas_info = get_info('atlas_3_10_threads')
        if not atlas_info:
            atlas_info = get_info('atlas_3_10')
        if not atlas_info:
            atlas_info = get_info('atlas_threads')
        if not atlas_info:
            atlas_info = get_info('atlas')

        need_lapack = 0
        need_blas = 0
        info = {}
        if atlas_info:
            # ATLAS builds without a complete LAPACK still need a separate
            # LAPACK library; these macros flag that situation.
            l = atlas_info.get('define_macros', [])
            if ('ATLAS_WITH_LAPACK_ATLAS', None) in l \
                   or ('ATLAS_WITHOUT_LAPACK', None) in l:
                need_lapack = 1
            info = atlas_info

        else:
            # No ATLAS at all: need both a generic LAPACK and a generic BLAS.
            warnings.warn(AtlasNotFoundError.__doc__, stacklevel=2)
            need_blas = 1
            need_lapack = 1
            dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])

        if need_lapack:
            lapack_info = get_info('lapack')
            #lapack_info = {} ## uncomment for testing
            if lapack_info:
                dict_append(info, **lapack_info)
            else:
                # Fall back to building LAPACK from source, if available.
                warnings.warn(LapackNotFoundError.__doc__, stacklevel=2)
                lapack_src_info = get_info('lapack_src')
                if not lapack_src_info:
                    warnings.warn(LapackSrcNotFoundError.__doc__, stacklevel=2)
                    return
                dict_append(info, libraries=[('flapack_src', lapack_src_info)])

        if need_blas:
            blas_info = get_info('blas')
            if blas_info:
                dict_append(info, **blas_info)
            else:
                # Fall back to building BLAS from source, if available.
                warnings.warn(BlasNotFoundError.__doc__, stacklevel=2)
                blas_src_info = get_info('blas_src')
                if not blas_src_info:
                    warnings.warn(BlasSrcNotFoundError.__doc__, stacklevel=2)
                    return
                dict_append(info, libraries=[('fblas_src', blas_src_info)])

        self.set_info(**info)
        return
|
||||
|
||||
class blas_opt_info(system_info):
    # Copy of numpy.distutils' optimized-BLAS detection with the OSX
    # Accelerate-related parts removed (see module comment above).

    notfounderror = BlasNotFoundError

    def calc_info(self):
        """Probe BLAS backends in preference order (MKL, BLIS, OpenBLAS,
        ATLAS, then generic BLAS) and record the first hit via
        ``set_info``."""

        blas_mkl_info = get_info('blas_mkl')
        if blas_mkl_info:
            self.set_info(**blas_mkl_info)
            return

        blis_info = get_info('blis')
        if blis_info:
            self.set_info(**blis_info)
            return

        openblas_info = get_info('openblas')
        if openblas_info:
            self.set_info(**openblas_info)
            return

        # ATLAS variants, newest/threaded flavors first.
        atlas_info = get_info('atlas_3_10_blas_threads')
        if not atlas_info:
            atlas_info = get_info('atlas_3_10_blas')
        if not atlas_info:
            atlas_info = get_info('atlas_blas_threads')
        if not atlas_info:
            atlas_info = get_info('atlas_blas')

        need_blas = 0
        info = {}
        if atlas_info:
            info = atlas_info
        else:
            # No optimized BLAS found: fall back to a generic BLAS.
            warnings.warn(AtlasNotFoundError.__doc__, stacklevel=2)
            need_blas = 1
            dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])

        if need_blas:
            blas_info = get_info('blas')
            if blas_info:
                dict_append(info, **blas_info)
            else:
                # Fall back to building BLAS from source, if available.
                warnings.warn(BlasNotFoundError.__doc__, stacklevel=2)
                blas_src_info = get_info('blas_src')
                if not blas_src_info:
                    warnings.warn(BlasSrcNotFoundError.__doc__, stacklevel=2)
                    return
                dict_append(info, libraries=[('fblas_src', blas_src_info)])

        self.set_info(**info)
        return
|
||||
Reference in New Issue
Block a user