I wish to deploy a package to PyPi using setuptools. However, the core part of the package is actually written in Fortran, and I am using f2py to wrap it in python. Basically the project's structure looks like this:
my_project
license.txt
README.md
setup.py
my_project
__init__.py
myfunc.py
hello.so
The module myfunc.py imports hello.so (import my_project.hello) which can then be used by functions inside myfunc.py. This works perfectly on my machine.
Then I tried standard setuptools installation: sudo python3 setup.py install on my Ubuntu, and it gets installed perfectly. But unfortunately, while importing, it throws ModuleNotFoundError: No module named 'hello'.
Now, from what I understand, on Linux based systems, for python, the shared libraries *.so are stored in /usr/lib/python3/dist-packages/. So I manually copied this hello.so there, and I got a working package! But of course this works only locally. What I would like to do is to tell setuptools to include hello.so inside the python-egg and automatically do the copying etc so that when a user uses pip3 install my_package, they will have access to this shared library automatically. I can see that numpy has somehow achieved that but even after looking at their code, I haven't been able to decode how they did it. Can someone help me with this? Thanks in advance.
You can achieve this with a setup.py file like this (simplified version, keep only the relevant parts for building external modules)
import os
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
class f2py_Extension(Extension):
    """A setuptools Extension whose sources are Fortran files built by f2py.

    The base Extension gets an empty ``sources`` list on purpose: the real
    Fortran sources are stashed on the instance and compiled later by the
    matching custom build command.
    """

    def __init__(self, name, sourcedirs):
        Extension.__init__(self, name, sources=[])
        # Absolute paths are what f2py is invoked on; the original relative
        # paths are kept as well so the build step can find the package
        # directory the resulting .so should land in.
        self.sourcedirs = [os.path.abspath(src) for src in sourcedirs]
        self.dirs = sourcedirs
class f2py_Build(build_ext):
    """Custom ``build_ext`` command that compiles Fortran sources with f2py.

    Each source file of every registered f2py_Extension is compiled into its
    own wrapper module, placed in the directory of the (relative) source path.
    """

    def run(self):
        for ext in self.extensions:
            self.build_extension(ext)

    def build_extension(self, ext):
        import subprocess

        # compile every Fortran source into its own f2py module
        for ind, to_compile in enumerate(ext.sourcedirs):
            # Directory part of the original relative path: this is where the
            # generated .so should end up (i.e. inside the package tree).
            module_loc = os.path.split(ext.dirs[ind])[0]
            # Module name = source file name without its extension.
            module_name = os.path.split(to_compile)[1].split('.')[0]
            # subprocess instead of os.system: no shell-string interpolation
            # (robust against spaces/metacharacters in paths) and a non-zero
            # f2py exit status raises CalledProcessError instead of being
            # silently ignored.
            subprocess.check_call(
                ['f2py', '-c', to_compile, '-m', module_name],
                cwd=module_loc or None,
            )
# Register the Fortran sources as one extension and hand the actual
# compilation over to the custom f2py build command.
extensions = [f2py_Extension('fortran_external', ['foo/one.F90', 'foo/bar/two.F90'])]

setup(
    name="foo",
    ext_modules=extensions,
    cmdclass={'build_ext': f2py_Build},
)
The essential parts for building an external module are ext_modules and cmdclass in setup(...). ext_modules is just a list of Extension instances, each of which describes a set of extension modules. In the setup.py above, I tell ext_modules I want to create two external modules with two source files foo/one.F90 and foo/bar/two.F90. Based on ext_modules, cmdclass is responsible for compiling the two modules, in our case, the command for compiling the module is
'cd %s;f2py -c %s -m %s' % (module_loc,to_compile,module_name)
Project structure before installation
├── foo
│ ├── __init__.py
│ ├── bar
│ │ └── two.F90
│ └── one.F90
└── setup.py
Project structure after python setup.py install
├── build
│ └── bdist.linux-x86_64
├── dist
│ └── foo-0.0.0-py3.7-linux-x86_64.egg
├── foo
│ ├── __init__.py
│ ├── __pycache__
│ │ └── __init__.cpython-37.pyc
│ ├── bar
│ │ ├── two.F90
│ │ └── two.cpython-37m-x86_64-linux-gnu.so
│ ├── one.F90
│ └── one.cpython-37m-x86_64-linux-gnu.so
├── foo.egg-info
│ ├── PKG-INFO
│ ├── SOURCES.txt
│ ├── dependency_links.txt
│ └── top_level.txt
└── setup.py
The two source files one.F90 and two.F90 are very simple
one.F90
! one.F90 -- example module compiled by f2py into foo/one.*.so.
module test
implicit none
contains
! Adds 1 to `a` and prints the result with the 'one' label.
! NOTE(review): `a` carries no intent attribute and `b` is a local
! temporary, so nothing is passed back to the caller -- output is
! print-only. Confirm intent(in) is the desired f2py treatment of `a`.
subroutine add(a)
implicit none
integer :: a
integer :: b
b = a + 1
print *, 'one',b
end subroutine add
end module test
two.F90
! two.F90 -- example module compiled by f2py into foo/bar/two.*.so.
module test
implicit none
contains
! Adds 2 to `a` and prints the result with the 'two' label.
! NOTE(review): `a` carries no intent attribute and `b` is a local
! temporary, so nothing is passed back to the caller -- output is
! print-only. Confirm intent(in) is the desired f2py treatment of `a`.
subroutine add(a)
implicit none
integer :: a
integer :: b
b = a + 2
print *, 'two',b
end subroutine add
end module test
After I installed the package, I can successfully run
>>> from foo.bar.two import test
>>> test.add(5)
two 7
and
>>> from foo.one import test
>>> test.add(5)
one 6
Here is an approach based on F2PY's documentation (the example there covers building multiple F2PY modules, and multiple source files per module), making use of numpy.distutils, that supports Fortran source files.
The structure of a minimal example with multiple F2PY extension modules is based on a src directory layout. It is not necessary/required, but has the advantage that the test routine cannot run unless the package has been installed successfully.
Source layout
my_project
|
+-- src
| |
| +-- my_project
| |
| +-- __init__.py
| +-- mod1.py
| +-- funcs_m.f90
| +-- two
| |
| +-- plus2.f90
| +-- times2.f90
|
+-- test_my_project.py
+-- setup.py
setup.py
from setuptools import find_packages
from numpy.distutils.core import setup, Extension
# Two F2PY extension modules: `modf90` wraps the modern Fortran-module
# source, `oldf90` wraps the two plain (module-less) subroutine files.
ext1 = Extension(
    name='my_project.modf90',
    sources=['src/my_project/funcs_m.f90'],
    f2py_options=['--quiet'],
)

ext2 = Extension(
    name='my_project.oldf90',
    sources=['src/my_project/two/plus2.f90', 'src/my_project/two/times2.f90'],
    f2py_options=['--quiet'],
)

setup(
    name="my_project",
    version="0.0.1",
    # The package lives under src/ -- map the package root accordingly.
    package_dir={"": "src"},
    packages=find_packages(where="src"),
    ext_modules=[ext1, ext2],
)
__init__.py
The __init__.py file is empty. (Can e.g. import the F2PY modules here if desired)
mod1.py
def add(a, b):
    """Return the sum of *a* and *b*."""
    total = a + b
    return total
funcs_m.f90
! Fortran module exposed to Python by f2py as my_project.modf90.funcs_m.
module funcs_m
implicit none
contains
! c = a + b.  f2py turns the intent(out) argument into the Python return
! value, so from Python this is called as: c = funcs_m.add(a, b).
subroutine add(a, b, c)
integer, intent(in) :: a
integer, intent(in) :: b
integer, intent(out) :: c
c = a + b
end subroutine add
end module funcs_m
plus2.f90
! Plain (module-less) subroutine, compiled into my_project.oldf90.
! y = x + 2; the intent(out) argument becomes the Python return value.
subroutine plus2(x, y)
integer, intent(in) :: x
integer, intent(out) :: y
y = x + 2
end subroutine plus2
times2.f90
! Plain (module-less) subroutine, compiled into my_project.oldf90.
! y = x * 2; the intent(out) argument becomes the Python return value.
subroutine times2(x, y)
integer, intent(in) :: x
integer, intent(out) :: y
y = x * 2
end subroutine times2
test_my_project.py
# Smoke test: import the installed package (pure-Python module plus the two
# F2PY-built extension modules) and exercise one routine from each.
import my_project.mod1
import my_project.oldf90
import my_project.modf90

# Pure-Python addition.
print("mod1.add: 1 + 2 = ", my_project.mod1.add(1, 2))
# Fortran-module routine: f2py exposes it under the Fortran module name.
print("modf90.funcs_m.add: 1 + 2 = ", my_project.modf90.funcs_m.add(1, 2))

x = 1
# The intent(out) arguments become return values in the f2py wrappers.
x = my_project.oldf90.plus2(x)
print("oldf90.plus2: 1 + 2 = ", x)
x = my_project.oldf90.times2(x)
# NOTE(review): this label says 'plus2' but the call above is times2; the
# string is kept as-is because the answer's sample output shows it verbatim.
print("oldf90.plus2: 3 * 2 = ", x)
Installing
Now, one can use pip to install the package. There are several advantages to using pip (including ease of upgrading, or uninstalling) as opposed to setup.py install (but this can still be used for building the package for distribution!). From the directory containing setup.py:
> python -m pip install .
Testing
And then, to test the just installed package
> python test_my_project.py
mod1.add: 1 + 2 = 3
modf90.funcs_m.add: 1 + 2 = 3
oldf90.plus2: 1 + 2 = 3
oldf90.plus2: 3 * 2 = 6
This setup has been tested with success on Windows 10 (with ifort), on Ubuntu 18.04 (with gfortran) and on MacOS High Sierra (with gfortran), all with Python 3.6.3.
Related
I have a top-level directory and then a few subdirectories underneath it, which in turn have a few subdirectories and files.
the tree view looks like this:-
treeview
├── abc
│ ├── pqr
│ │ ├── 1
│ │ ├── 2
│ │ └── 3
│ └── sty
└── xyz
Now I want my script to ignore walking into "abc" and "1"(this is a directory and not a file).
I have come up with the below script:-
import os
import shutil

# Accumulators: directories seen while walking the excluded subtrees
# (with pruning) versus directories seen in a full, unpruned walk.
files_to_keep = []
files_grandlist = []
dirs_to_keep = []
dirs_grandlist = []

rootpath = "./treeview"
exclude = ['./treeview/abc', './treeview/abc/pqr/1']
exclude_temp = {'abc', '1'}

# Walk each excluded path, pruning in place so os.walk does not descend
# into directory names listed in exclude_temp.
for path in exclude:
    for root, dirs, files in os.walk(path):
        dirs[:] = [entry for entry in dirs if entry not in exclude_temp]
        dirs_to_keep.append(root)

print("================dirs to keep======")
for entry in dirs_to_keep:
    print(entry)

# Full walk of the whole tree, with no pruning at all.
for root, dirs, files in os.walk(rootpath):
    dirs_grandlist.append(root)

print("===============grandlist==========")
for entry in dirs_grandlist:
    print(entry)

# Symmetric difference: directories present in exactly one of the two lists.
print("filter============================")
for entry in set(dirs_grandlist) ^ set(dirs_to_keep):
    print(entry)
This gives me output like below when run:
================dirs to keep======
./treeview/abc
./treeview/abc/pqr
./treeview/abc/pqr/2
./treeview/abc/pqr/3
./treeview/abc/sty
./treeview/abc/pqr/1
===============grandlist==========
./treeview
./treeview/abc
./treeview/abc/pqr
./treeview/abc/pqr/1
./treeview/abc/pqr/2
./treeview/abc/pqr/3
./treeview/abc/sty
./treeview/xyz
filter============================
./treeview
./treeview/xyz
The idea is to capture a list of directories/subdirectories under treeview top level directory and then capture the same information for a list of excluded directories.
the output under "filter============================" line should give me a list of directories which I want to remove from the filesystem.
Appreciate any help here.
Given the following Python module layout:
app/
├── drivers
│ ├── mydriver
│ │ ├── driver.py
│ │ └── __init__.py
│ └── __init__.py
├── __init__.py
└── main.py
I am trying to dynamically import the "mydriver" module in main.py:
import os
import importlib

# Directory holding the driver packages, located next to this file.
driver_dir = os.path.join(os.path.dirname(__file__), 'drivers')

# NOTE(review): ExtensionFileLoader + EXTENSION_SUFFIXES only match compiled
# extension modules (.so/.pyd), while 'mydriver' is a package of .py files.
# This loader configuration therefore cannot load it as source -- consider
# also passing (importlib.machinery.SourceFileLoader,
# importlib.machinery.SOURCE_SUFFIXES).
loader_details = (
    importlib.machinery.ExtensionFileLoader,
    importlib.machinery.EXTENSION_SUFFIXES
)
finder = importlib.machinery.FileFinder(driver_dir, loader_details)

# With no matching loader, find_spec presumably falls back to a namespace
# spec whose loader executes nothing -- verify by inspecting spec.loader.
spec = finder.find_spec('mydriver')
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)

# The following line produces AttributeError: module 'mydriver' has no attribute 'driver'
driver = getattr(module, 'driver')
drivers/mydriver/__init__.py contains the following:
from . import driver
print("TEST")
So the result is the Attribute error as written in the inline comment. The "print()" from __init__.py is also not being executed.
Any hints why the module is apparently not being evaluated?
While I haven't found a root cause, I did find a (not so pretty) workaround. For some reason, the module can not be executed if it was found using the FileFinder. It does however execute if I do the following:
# Workaround: let Python's normal path-based import machinery locate the
# package instead of a manually-constructed FileFinder.
# NOTE(review): relies on `sys` being imported and `driver_dir` being
# defined earlier in the surrounding code.
sys.path.insert(0, driver_dir)
spec = importlib.util.find_spec('mydriver')
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
So in short, I don't know what Python wants from a Finder to also execute modules, not just files. Well, at least I have working code for now...
I'm trying to use include-search-based settings in my Vim configuration, which is causing the [i, ]i, [d, ]d, etc. commands to not work as expected. In addition, when :checkpath! is run to look at included files, it's not working as expected either.
Could you please help me let me know what am i doing wrong & what can i do to fix this.
My project root looks like below. I've intentionally not listing entries of venv dir, just to keep the entry list small.
\> tree -I "build|dist|*egg*|__pycache__" -L 2
.
├── MANIFEST.in
├── README.md
├── acs_datamodels-0.0.1-py3-none-any.whl
├── ai_core-0.0.1-py3-none-any.whl
├── datamodels
│ ├── __init__.py
│ ├── fabric.py
│ ├── literals.py
│ └── precompute.py
├── encrypt.log
├── setup.py
├── tests
│ ├── __init__.py
│ ├── data
│ ├── test_fabric.py
│ └── test_precompute.py
└── venv
├── bin
├── include
├── lib
├── lib64 -> lib
└── pyvenv.cfg
my :set path? looks like
path=datamodels,venv/lib/python3.7/site-packages/*/**2
my :set include? looks like
^\s*\(from\|import\)\s*\zs\(\S\+\|\S\+\s*import\{1}\s*\S\+\)\ze\($\|\s*as\|,\)
my :set includeexpr? looks like
includeexpr=PyInclude(v:fname)
contents of ~/.vim/after/ftplugin/python.vim is as below
" Buffer-local Python settings: 4-space indentation throughout.
set shiftwidth=4 tabstop=4 softtabstop=4 expandtab autoindent smartindent
" Files/directories to skip during filename completion and expansion.
setlocal wildignore=*.pyc,bin,*egg*,__pycache__/*,build,dist
" Pattern recognising `import x` / `from x import y` lines; \zs...\ze
" captures just the module text that gets handed to 'includeexpr'.
setlocal include=^\\s*\\(from\\\|import\\)\\s*\\zs\\(\\S\\+\\\|\\S\\+\\s*import\\{1}\\s*\\S\\+\\)\\ze\\($\\\|\\s*as\\\|,\\)

" Translate the text captured by 'include' into a candidate file path
" (dots become slashes, '.py' appended) for Vim to resolve via 'path'.
function! PyInclude(fname)
    echom "fname: " a:fname
    " Split `x import y` style captures into module part and imported name.
    let parts = split(a:fname, ' import ')
    echom "parts: " parts
    let l = parts[0] " (1) logging (2) ai_core.commons (3) datamodels.literals
    if len(parts) > 1
        let r = parts[1] " (1) datamodels (2) datetime
        " First try module.name joined as one path and check it exists.
        let joined = join([l, r], '.') " datetime.datetime, ai_core.commons.decode_token
        let fp = substitute(joined, '\.', '/', 'g') . '.py' " datetime/datetime, ai_core/commons/decode_token
        let found = glob(fp, 1)
        echom "parts > 1" found
        if len(found)
            return found
        endif
    endif
    " Fall back to just the module part, e.g. datamodels/literals.py.
    let kp = substitute(l, '\.', '/', 'g') . '.py'
    echom "parts < 1" kp
    return kp " ai_core/commons.py, datamodels/literals.py
endfunction

setlocal includeexpr=PyInclude(v:fname)
Output of :checkpath! is as below. Please note the output also contains some debug msg i'd added. If you see from typing import Dict and import logging has been identified from venv dir, but other libraries: from ai_core.commons import decode_token which i've installed from wheel file are not recognized, even though they're are there in venv dir. Also, the other module from current file's relative path import datamodels.literals is not recognized.
fname: typing import Dict
parts: ['typing', 'Dict']
parts > 1
parts < 1 typing.py
venv/lib/python3.7/site-packages/pip/_internal/utils/typing.py
fname: ai_core.commons import decode_token
parts: ['ai_core.commons', 'decode_token']
parts > 1
parts < 1 ai_core/commons.py
ai_core.commons import decode_token NOT FOUND
fname: logging
parts: ['logging']
parts < 1 logging.py
venv/lib/python3.7/site-packages/pip/_internal/utils/logging.py
fname: datamodels.literals
parts: ['datamodels.literals']
parts < 1 datamodels/literals.py
datamodels.literals NOT FOUND
I've learnt these from this video, thanks a ton to Leeren for introducing these and many more to me.
&path is a whitelist of directories in which to search for files. Basically, you can think of include search as appending the filename to every directory listed in &path and seeing if that path leads to something (it doesn't really work like that but that's a good enough mental model for the problem at hand).
For example, if your filename is quux and your &path is foo,bar,baz, then Vim is going to search for the following files:
foo/quux
bar/quux
baz/quux
Here, the filename is datamodels/literals.py and your &path is datamodels,venv/lib/python3.7/site-packages/*/**2 so Vim is going to search for:
datamodels/datamodels/literals.py
venv/lib/python3.7/site-packages/*/**/datamodels/literals.py
venv/...
and fail for rather obvious reasons.
There are two ways to go around that:
Prepend your &path with .,,, which tells Vim to search for files in the directory of the current file and in the working directory. Your &path would look like this:
.,,datamodels,venv/lib/python3.7/site-packages/*/**2
which should let Vim find datamodels/literals.py easily:
datamodels/literals.py <-- BINGO!
datamodels/datamodels/literals.py
venv/lib/python3.7/site-packages/*/**/datamodels/literals.py
venv/...
Make your &includeexpr return literals.py, which can be found with your current &path, instead of datamodels/literals.py:
datamodels/literals.py <-- BINGO!
venv/lib/python3.7/site-packages/*/**/literals.py
venv/...
IMO, you should do both.
Doing :set path=.,,venv/lib/python3/site-packages/**2 did the magic. The correction is **2 to represent subdirs 2 levels deep instead of */**2 from my original post.
Yes as #romail pointed, i did try appending each filename from includeexpr to the dir list in path. Thank you.
I have a list of colors with a txt file containing URLs of images of those colors. I am trying to create a folder to contain images of each color and move this directory so that I may ultimately download the images.
I am able to perform this for each element of the list individually, but this is tedious and I would prefer to automate it.
classes = ['red', 'orange', 'yellow', 'green', 'blue', 'purple']
This is the code I currently have for each color:
folder = 'red'
file = 'red.txt'
mv red.txt data/colors
path = Path('data/colors')
dest = path/colors
dest.mkdir(parents=True, exist_ok=True)
download_images(path/file, dest, max_pics=200)
I expect to have a folder per color containing the respective downloaded images.
Your list of colors is in classes python list. You have <color name>.txt files containing URLs of images of those colors listed in classes list. So you have an initial directory structure which looks like following directory tree:
.
├── blue.txt
├── green.txt
├── orange.txt
├── purple.txt
├── red.txt
├── script.py
└── yellow.txt
Now you want to create separate directories for each color. So finally your directory structure should look like following directory tree:
.
├── data
│ └── colors
│ ├── blue
│ ├── blue.txt
│ ├── green
│ ├── green.txt
│ ├── orange
│ ├── orange.txt
│ ├── purple
│ ├── purple.txt
│ ├── red
│ ├── red.txt
│ ├── yellow
│ └── yellow.txt
└── script.py
Where your download_image() method will download the image for given URLs in <color name>.txt file which it receives as one of the arguments. It also receives the destination of the image directory to be placed and the maximum no of images it should download.
If I understood your problem correctly following code would solve your problem. Code is well commented and self-explanatory. You can drop comments to ask for more clarifications.
import os
import shutil

base_path = "data/colors/"

# Create the base path directories if not already present.
# os.makedirs replaces the former os.system("mkdir -p ...") calls: it is
# portable, raises on real failures, and needs no shell string building.
os.makedirs(base_path, exist_ok=True)

classes = ['red', 'orange', 'yellow', 'green', 'blue', 'purple']

# dummy download image function
def download_image(path, dest, max_pics):
    """Placeholder: report the URL file, destination and download limit.

    path     -- path to the <colour>.txt file listing image URLs
    dest     -- directory the images should be placed in
    max_pics -- maximum number of images to download
    """
    print("URL file path: " + path)
    print("Image destination: " + dest)
    print("No of Images to be downloaded: " + str(max_pics))

if __name__ == "__main__":
    for colour in classes:
        # Create a directory for each colour if not already present.
        os.makedirs(os.path.join(base_path, colour), exist_ok=True)
        # Move <colour>.txt into the base path (shutil.move instead of a
        # shelled-out `mv`: portable and raises if the move fails, e.g.
        # when the destination file already exists).
        shutil.move(colour + ".txt", base_path)
        dest = base_path + colour
        # Call download_image for this colour's URL list.
        download_image(base_path + colour + ".txt", dest, max_pics=200)
I do not get the automated build to update the project with SCons. First I change something in the source files and the scons tells me:
scons: done reading SConscript files.
scons: Building targets ...
scons: `.' is up to date.
scons: done building targets.
How to update an automated build?
UPDATE 20170601:
leder#PC-LAP127:~/Source/Eiffel/PF_HP-mt$ scons --tree=prune project=pf_hp.ecf
+-.
+-.sconf_temp
+-SConstruct
+-build
| +-build/F_code-unix.tar
| | +-pf_hp.ecf
| | +-project.py
| | +-/home/leder/Source/Eiffel/library/Eiffel-Loop/precomp/linux-x86-64/console-application.ecf
| | +-/home/leder/Source/Eiffel/library/Eiffel-Loop/precomp/console-application.ecf
| +-build/linux-x86-64
| +-build/linux-x86-64/package
| +-build/linux-x86-64/package/bin
| +-build/linux-x86-64/package/bin/pf_hp
| +-[build/F_code-unix.tar]
+-config.log
+-pf_hp.ecf
+-project.py
leder#PC-LAP127:~/Source/Eiffel/PF_HP-mt$ tree -L 2 .
.
├── build
│ ├── F_code-unix.tar
│ ├── linux-x86-64
│ └── version.txt
├── config.log
├── EIFGENs
│ └── classic
├── git_push.sh
├── input.txt
├── LICENSE.gpl
├── LIESMICH.txt
├── pf_hp.ecf
├── pf_hp.ecf.old
├── pf_hp.pecf
├── project.py
├── project.pyc
├── README.txt
├── SConstruct
├── source
│ ├── application_root.e
│ ├── build_info.e
│ ├── folding
│ ├── notes
│ ├── sub-applications
│ └── testing
└── test.sh
9 directories, 17 files
leder#PC-LAP127:~/Source/Eiffel/PF_HP-mt$ less SConstruct
import eiffel_loop.eiffel.SConstruct
SConstruct (END)
UPDATE_20170601, eiffel_loop.eiffel.SConstruct.py:
# author: "Finnian Reilly"
# copyright: "Copyright (c) 2001-2012 Finnian Reilly"
# contact: "finnian at eiffel hyphen loop dot com"
# license: "MIT license (See: en.wikipedia.org/wiki/MIT_License)"
# date: "3 June 2010"
# revision: "0.2"
import os, sys
from os import path
from eiffel_loop.eiffel import project
from eiffel_loop.scons import eiffel
from eiffel_loop.eiffel.ecf import EIFFEL_CONFIG_FILE
from eiffel_loop.eiffel.ecf import FREEZE_BUILD
from eiffel_loop.eiffel.ecf import C_CODE_TAR_BUILD
from eiffel_loop.eiffel.ecf import FINALIZED_BUILD
from SCons.Script import *
# SCRIPT START
# Declare the build variables accepted on the scons command line.
arguments = Variables()
arguments.Add (EnumVariable('cpu', 'Set target cpu for compiler', 'x64', allowed_values=('x64', 'x86')))
arguments.Add (
    EnumVariable('action', 'Set build action', 'finalize',
        allowed_values=(
            Split ("freeze finalize finalize_and_test finalize_and_install install_resources make_installers")
        )
    )
)
arguments.Add (BoolVariable ('compile_eiffel', 'Compile Eiffel source (no implies C compile only)', 'yes'))
arguments.Add (BoolVariable ('install', 'Set to \'yes\' to install finalized release', 'no'))
arguments.Add (PathVariable ('project', 'Path to Eiffel configuration file', 'default.ecf'))
#arguments.Add (
#	ListVariable (
#		'MSC_options', 'Visual Studio setenv.cmd options', '', Split ("/Debug /Release /x86 /x64 /ia64 /vista /xp /2003 /2008 /win7")
#	)
#)

env = Environment (variables = arguments)
Help (arguments.GenerateHelpText (env) + '\nproject: Set to name of Eiffel project configuration file (*.ecf)\n')

if env.GetOption ('help'):
    # Help requested: do nothing; SCons prints the generated help text.
    None
else:
    is_windows_platform = sys.platform == 'win32'
    # project.py carries per-project build settings.
    project_py = project.read_project_py ()

#	MSC_options = env.get ('MSC_options').data
#	if MSC_options:
#		project_py.MSC_options = MSC_options
#		print 'MSC_options:', project_py.MSC_options

    ecf_path = env.get ('project')
    action = env.get ('action')
    compile_eiffel = env.get ('compile_eiffel')

    # Configure the build environment for the selected target CPU and
    # propagate the EiffelStudio platform/compiler settings.
    project_py.set_build_environment (env.get ('cpu'))
    env.Append (ENV = os.environ, ISE_PLATFORM = os.environ ['ISE_PLATFORM'])
    if 'ISE_C_COMPILER' in os.environ:
        env.Append (ISE_C_COMPILER = os.environ ['ISE_C_COMPILER'])

    config = EIFFEL_CONFIG_FILE (ecf_path)
    # Dependencies that should trigger a rebuild when changed.
    project_files = [ecf_path, 'project.py']

    if action == 'install_resources':
        # Resource-only install: no compilation step.
        build = FREEZE_BUILD (config, project_py)
        build.post_compilation ()
    else:
        if action in ['finalize', 'make_installers']:
            # Finalized build: Eiffel -> C-code tarball -> C compile.
            tar_build = C_CODE_TAR_BUILD (config, project_py)
            build = FINALIZED_BUILD (config, project_py)
            if compile_eiffel:
                env.Append (EIFFEL_BUILD = tar_build)
                env.Append (BUILDERS = {'eiffel_compile' : Builder (action = eiffel.compile_eiffel)})
                f_code = env.eiffel_compile (tar_build.target (), project_files)
            else:
                # C compile only: reuse previously generated C code.
                f_code = None
        else:
            # Workbench (freeze) build.
            build = FREEZE_BUILD (config, project_py)
            f_code = None

        env.Append (C_BUILD = build)
        env.Append (BUILDERS = {'c_compile' : Builder (action = eiffel.compile_C_code)})
        if f_code:
            # The C build depends on the freshly generated C-code tarball.
            executable = env.c_compile (build.target (), tar_build.target ())
        else:
            executable = env.c_compile (build.target (), project_files)

        if build.precompile_path:
            # Copy the precompiled-library ECF into place and make the
            # appropriate target depend on it.
            env.Append (BUILDERS = {'precomp_copier' : Builder (action = eiffel.copy_precompile)})
            precompile_name = path.basename (build.precompile_path)
            precompile_dir = path.dirname (path.dirname (build.precompile_path))
            precomp_ecf = env.precomp_copier (build.precompile_path, path.join (precompile_dir, precompile_name))
            if f_code:
                Depends (tar_build.target (), build.precompile_path)
            else:
                Depends (executable, build.precompile_path)

        eiffel.check_C_libraries (env, build)
        if len (build.SConscripts) > 0:
            print "\nDepends on External libraries:"
            for script in build.SConscripts:
                print "\t" + script
            SConscript (build.SConscripts, exports='env')

        # only make library a dependency if it doesn't exist or object files are being cleaned out
        lib_dependencies = []
        for lib in build.scons_buildable_libs:
            if env.GetOption ('clean') or not path.exists (lib):
                if not lib in lib_dependencies:
                    lib_dependencies.append (lib)
        Depends (executable, lib_dependencies)

        # NOTE(review): precomp_ecf is only assigned inside the
        # `if build.precompile_path:` branch above -- confirm it is always
        # bound by the time execution reaches here.
        productions = [executable, precomp_ecf]
        if f_code:
            productions.append (tar_build.target ())
        env.NoClean (productions)
If SCons does not know or see what files have been changed, an alternative is to run EiffelStudio compiler every time. It performs quick incremental recompilation in workbench mode, so you are not penalized by waiting for recompilation from scratch.
Note. If you are not using graphical environment, projects can be built with a slightly smaller and slightly faster ecb version of the compiler (instead of the regular ec). But this comes at the cost of incompatibility with the IDE (e.g., in completely non-interactive compilation setups).