jax.lax.fori_loop Abstract tracer value encountered where concrete value is expected - jax

I have a JAX loop that looks like the following, where inside the step function I take the min of the two arguments:
import jax
def step(timestep: int, order: int = 4) -> int:
    order = min(timestep + 1, order)
    return order
num_steps = 10
order = 100
order = jax.lax.fori_loop(0, num_steps, step, order)
The above code fails with a jax._src.errors.ConcretizationTypeError. This is the full stack trace:
WARNING:jax._src.lib.xla_bridge:No GPU/TPU found, falling back to CPU. (Set TF_CPP_MIN_LOG_LEVEL=0 and rerun for more info.)
---------------------------------------------------------------------------
UnfilteredStackTrace Traceback (most recent call last)
<ipython-input-4-9ec280f437cb> in <module>
2 order = 100
----> 3 order = jax.lax.fori_loop(0, num_steps, step, order)
/usr/local/lib/python3.8/dist-packages/jax/_src/traceback_util.py in reraise_with_filtered_traceback(*args, **kwargs)
161 try:
--> 162 return fun(*args, **kwargs)
163 except Exception as e:
/usr/local/lib/python3.8/dist-packages/jax/_src/lax/control_flow/loops.py in fori_loop(lower, upper, body_fun, init_val)
1691
-> 1692 (_, result), _ = scan(_fori_scan_body_fun(body_fun), (lower_, init_val),
1693 None, length=upper_ - lower_)
/usr/local/lib/python3.8/dist-packages/jax/_src/traceback_util.py in reraise_with_filtered_traceback(*args, **kwargs)
161 try:
--> 162 return fun(*args, **kwargs)
163 except Exception as e:
/usr/local/lib/python3.8/dist-packages/jax/_src/lax/control_flow/loops.py in scan(f, init, xs, length, reverse, unroll)
258 # necessary, a second time with modified init values.
--> 259 init_flat, carry_avals, carry_avals_out, init_tree, *rest = _create_jaxpr(init)
260 new_init_flat, changed = _promote_weak_typed_inputs(init_flat, carry_avals, carry_avals_out)
/usr/local/lib/python3.8/dist-packages/jax/_src/lax/control_flow/loops.py in _create_jaxpr(init)
244 carry_avals = tuple(_map(_abstractify, init_flat))
--> 245 jaxpr, consts, out_tree = _initial_style_jaxpr(
246 f, in_tree, (*carry_avals, *x_avals), "scan")
/usr/local/lib/python3.8/dist-packages/jax/_src/lax/control_flow/common.py in _initial_style_jaxpr(fun, in_tree, in_avals, primitive_name)
59 primitive_name: Optional[str] = None):
---> 60 jaxpr, consts, out_tree = _initial_style_open_jaxpr(
61 fun, in_tree, in_avals, primitive_name)
/usr/local/lib/python3.8/dist-packages/jax/_src/lax/control_flow/common.py in _initial_style_open_jaxpr(fun, in_tree, in_avals, primitive_name)
53 debug = pe.debug_info(fun, in_tree, False, primitive_name or "<unknown>")
---> 54 jaxpr, _, consts = pe.trace_to_jaxpr_dynamic(wrapped_fun, in_avals, debug)
55 return jaxpr, consts, out_tree()
/usr/local/lib/python3.8/dist-packages/jax/_src/profiler.py in wrapper(*args, **kwargs)
313 with TraceAnnotation(name, **decorator_kwargs):
--> 314 return func(*args, **kwargs)
315 return wrapper
/usr/local/lib/python3.8/dist-packages/jax/interpreters/partial_eval.py in trace_to_jaxpr_dynamic(fun, in_avals, debug_info, keep_inputs)
1980 main.jaxpr_stack = () # type: ignore
-> 1981 jaxpr, out_avals, consts = trace_to_subjaxpr_dynamic(
1982 fun, main, in_avals, keep_inputs=keep_inputs, debug_info=debug_info)
/usr/local/lib/python3.8/dist-packages/jax/interpreters/partial_eval.py in trace_to_subjaxpr_dynamic(fun, main, in_avals, keep_inputs, debug_info)
1997 in_tracers_ = [t for t, keep in zip(in_tracers, keep_inputs) if keep]
-> 1998 ans = fun.call_wrapped(*in_tracers_)
1999 out_tracers = map(trace.full_raise, ans)
/usr/local/lib/python3.8/dist-packages/jax/linear_util.py in call_wrapped(self, *args, **kwargs)
166 try:
--> 167 ans = self.f(*args, **dict(self.params, **kwargs))
168 except:
/usr/local/lib/python3.8/dist-packages/jax/_src/lax/control_flow/loops.py in scanned_fun(loop_carry, _)
1607 i, x = loop_carry
-> 1608 return (i + 1, body_fun()(i, x)), None
1609 return scanned_fun
<ipython-input-2-2e3345899235> in step(timestep, order)
1 def step(timestep: int, order: int = 100) -> int:
----> 2 order = min(timestep + 1, order)
3 return order
/usr/local/lib/python3.8/dist-packages/jax/core.py in __bool__(self)
633 def __nonzero__(self): return self.aval._nonzero(self)
--> 634 def __bool__(self): return self.aval._bool(self)
635 def __int__(self): return self.aval._int(self)
/usr/local/lib/python3.8/dist-packages/jax/core.py in error(self, arg)
1266 def error(self, arg):
-> 1267 raise ConcretizationTypeError(arg, fname_context)
1268 return error
UnfilteredStackTrace: jax._src.errors.ConcretizationTypeError: Abstract tracer value encountered where concrete value is expected: Traced<ShapedArray(bool[], weak_type=True)>with<DynamicJaxprTrace(level=1/0)>
The problem arose with the `bool` function.
The error occurred while tracing the function scanned_fun at /usr/local/lib/python3.8/dist-packages/jax/_src/lax/control_flow/loops.py:1606 for scan. This concrete value was not available in Python because it depends on the values of the argument 'loop_carry'.
See https://jax.readthedocs.io/en/latest/errors.html#jax.errors.ConcretizationTypeError
The stack trace below excludes JAX-internal frames.
The preceding is the original exception that occurred, unmodified.
--------------------
The above exception was the direct cause of the following exception:
ConcretizationTypeError Traceback (most recent call last)
<ipython-input-4-9ec280f437cb> in <module>
1 num_steps = 10
2 order = 100
----> 3 order = jax.lax.fori_loop(0, num_steps, step, order)
<ipython-input-2-2e3345899235> in step(timestep, order)
1 def step(timestep: int, order: int = 100) -> int:
----> 2 order = min(timestep + 1, order)
3 return order
ConcretizationTypeError: Abstract tracer value encountered where concrete value is expected: Traced<ShapedArray(bool[], weak_type=True)>with<DynamicJaxprTrace(level=1/0)>
The problem arose with the `bool` function.
The error occurred while tracing the function scanned_fun at /usr/local/lib/python3.8/dist-packages/jax/_src/lax/control_flow/loops.py:1606 for scan. This concrete value was not available in Python because it depends on the values of the argument 'loop_carry'.
See https://jax.readthedocs.io/en/latest/errors.html#jax.errors.ConcretizationTypeError
Everything works fine if, instead of using jax.lax.fori_loop, I use a plain Python loop, but then my original code ends up very slow. How can I fix this issue?

Use jax.numpy.minimum in place of min:
def step(timestep: int, order: int = 4) -> int:
    order = jax.numpy.minimum(timestep + 1, order)
    return order
The reason min does not work is that in the course of executing code within jit, grad, vmap, fori_loop, etc., JAX replaces concrete values with abstract tracers, and Python functions like min don't know how to handle these abstract values. See How to Think in JAX for more background on this.
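For reference, a complete runnable version of the fix (a minimal sketch of the code from the question with jax.numpy.minimum substituted in):

import jax
import jax.numpy as jnp

def step(timestep: int, order: int) -> int:
    # jnp.minimum operates on abstract tracers; Python's built-in min
    # would try to turn a traced comparison into a concrete bool
    return jnp.minimum(timestep + 1, order)

num_steps = 10
order = jax.lax.fori_loop(0, num_steps, step, 100)
print(order)  # 1: the carry drops to min(0 + 1, 100) on the first step and stays there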

Related

Color heatmap in different colors by column in dataframe

I have a dataframe which I'm trying to plot as a heatmap.
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter

df = pd.DataFrame(np.random.randint(0, 2, size=(10, 5)),
                  columns=['1', '2', '3', '4', '5'])
cluster = [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]
df['cluster'] = cluster
x_axis_labels = []
for i in range(1, 6):
    x_axis_labels.append(i)
fig, ax = plt.subplots(figsize=(4, 10))
np_smooth = gaussian_filter(df, sigma=0.75)
sns.heatmap(np_smooth, cmap="YlGnBu",
            xticklabels=x_axis_labels,
            yticklabels=False, cbar=False)
plt.show()
And this is the output:
[heatmap image]
Each row of the heatmap represents a row in the df. I would like to color each cluster in a different color, like in this photo:
[desired output image]
I've added the following code, but it gives me an error. I would be happy for some help! The additional code:
cmaps = {'1': 'Blues_r', '2': 'Greens_r', '3': 'Blues_r', '4': 'Greens_r', '5': 'Blues_r', '6': 'Greens_r', '7': 'Blues_r', '0': 'Greens_r'}
for clus, cmap in cmaps.items():
    mask = df.apply(lambda x: x if x['cluster'] == int(clus) else 0, result_type='broadcast',
                    axis=1).eq(0)
    sns.heatmap(np_smooth, mask=mask, cmap=cmap, xticklabels=x_axis_labels, yticklabels=False,
                cbar=False, ax=ax)
plt.show()
The error:
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~\Anaconda3\lib\site-packages\numexpr\necompiler.py in evaluate(ex, local_dict, global_dict, out, order, casting, **kwargs)
826 try:
--> 827 compiled_ex = _numexpr_cache[numexpr_key]
828 except KeyError:
KeyError: ('a_value | b_value', (('optimization', 'aggressive'), ('truediv', True)), (('a_value', <class 'numpy.float64'>), ('b_value', <class 'bool'>)))
During handling of the above exception, another exception occurred:
NotImplementedError Traceback (most recent call last)
<ipython-input-191-2f3e7e4bc9aa> in cluster_heatmap(df, plot_name, base_num)
24 ).eq(0)
25 # plot masked heatmap on reusable ax
---> 26 sns.heatmap(np_smooth, mask=mask, cmap=cmap, ax=ax, xticklabels=x_axis_labels, yticklabels=False, cbar=False)
27
28
~\Anaconda3\lib\site-packages\seaborn\_decorators.py in inner_f(*args, **kwargs)
44 )
45 kwargs.update({k: arg for k, arg in zip(sig.parameters, args)})
---> 46 return f(**kwargs)
47 return inner_f
48
~\Anaconda3\lib\site-packages\seaborn\matrix.py in heatmap(data, vmin, vmax, cmap, center, robust, annot, fmt, annot_kws, linewidths, linecolor, cbar, cbar_kws, cbar_ax, square, xticklabels, yticklabels, mask, ax, **kwargs)
540 plotter = _HeatMapper(data, vmin, vmax, cmap, center, robust, annot, fmt,
541 annot_kws, cbar, cbar_kws, xticklabels,
--> 542 yticklabels, mask)
543
544 # Add the pcolormesh kwargs here
~\Anaconda3\lib\site-packages\seaborn\matrix.py in __init__(self, data, vmin, vmax, cmap, center, robust, annot, fmt, annot_kws, cbar, cbar_kws, xticklabels, yticklabels, mask)
107
108 # Validate the mask and convet to DataFrame
--> 109 mask = _matrix_mask(data, mask)
110
111 plot_data = np.ma.masked_where(np.asarray(mask), plot_data)
~\Anaconda3\lib\site-packages\seaborn\matrix.py in _matrix_mask(data, mask)
86 # This works around an issue where `plt.pcolormesh` doesn't represent
87 # missing data properly
---> 88 mask = mask | pd.isnull(data)
89
90 return mask
~\Anaconda3\lib\site-packages\pandas\core\ops.py in f(self, other, axis, level, fill_value)
2021 # Another DataFrame
2022 pass_op = op if should_series_dispatch(self, other, op) else na_op
-> 2023 return self._combine_frame(other, pass_op, fill_value, level)
2024 elif isinstance(other, ABCSeries):
2025 # For these values of `axis`, we end up dispatching to Series op,
~\Anaconda3\lib\site-packages\pandas\core\frame.py in _combine_frame(self, other, func, fill_value, level)
5086 if ops.should_series_dispatch(this, other, func):
5087 # iterate over columns
-> 5088 return ops.dispatch_to_series(this, other, _arith_op)
5089 else:
5090 result = _arith_op(this.values, other.values)
~\Anaconda3\lib\site-packages\pandas\core\ops.py in dispatch_to_series(left, right, func, str_rep, axis)
1155 raise NotImplementedError(right)
1156
-> 1157 new_data = expressions.evaluate(column_op, str_rep, left, right)
1158
1159 result = left._constructor(new_data, index=left.index, copy=False)
~\Anaconda3\lib\site-packages\pandas\core\computation\expressions.py in evaluate(op, op_str, a, b, use_numexpr, **eval_kwargs)
206 use_numexpr = use_numexpr and _bool_arith_check(op_str, a, b)
207 if use_numexpr:
--> 208 return _evaluate(op, op_str, a, b, **eval_kwargs)
209 return _evaluate_standard(op, op_str, a, b)
210
~\Anaconda3\lib\site-packages\pandas\core\computation\expressions.py in _evaluate_numexpr(op, op_str, a, b, truediv, reversed, **eval_kwargs)
121
122 if result is None:
--> 123 result = _evaluate_standard(op, op_str, a, b)
124
125 return result
~\Anaconda3\lib\site-packages\pandas\core\computation\expressions.py in _evaluate_standard(op, op_str, a, b, **eval_kwargs)
66 _store_test_result(False)
67 with np.errstate(all='ignore'):
---> 68 return op(a, b)
69
70
~\Anaconda3\lib\site-packages\pandas\core\ops.py in column_op(a, b)
1133 def column_op(a, b):
1134 return {i: func(a.iloc[:, i], b.iloc[:, i])
-> 1135 for i in range(len(a.columns))}
1136
1137 elif isinstance(right, ABCSeries) and axis == "columns":
~\Anaconda3\lib\site-packages\pandas\core\ops.py in <dictcomp>(.0)
1133 def column_op(a, b):
1134 return {i: func(a.iloc[:, i], b.iloc[:, i])
-> 1135 for i in range(len(a.columns))}
1136
1137 elif isinstance(right, ABCSeries) and axis == "columns":
~\Anaconda3\lib\site-packages\pandas\core\frame.py in _arith_op(left, right)
5082 # left._binop(right, func, fill_value=fill_value)
5083 left, right = ops.fill_binop(left, right, fill_value)
-> 5084 return func(left, right)
5085
5086 if ops.should_series_dispatch(this, other, func):
~\Anaconda3\lib\site-packages\pandas\core\ops.py in na_op(x, y)
1999
2000 try:
-> 2001 result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs)
2002 except TypeError:
2003 result = masked_arith_op(x, y, op)
~\Anaconda3\lib\site-packages\pandas\core\computation\expressions.py in evaluate(op, op_str, a, b, use_numexpr, **eval_kwargs)
206 use_numexpr = use_numexpr and _bool_arith_check(op_str, a, b)
207 if use_numexpr:
--> 208 return _evaluate(op, op_str, a, b, **eval_kwargs)
209 return _evaluate_standard(op, op_str, a, b)
210
~\Anaconda3\lib\site-packages\pandas\core\computation\expressions.py in _evaluate_numexpr(op, op_str, a, b, truediv, reversed, **eval_kwargs)
112 'b_value': b_value},
113 casting='safe', truediv=truediv,
--> 114 **eval_kwargs)
115 except ValueError as detail:
116 if 'unknown type object' in str(detail):
~\Anaconda3\lib\site-packages\numexpr\necompiler.py in evaluate(ex, local_dict, global_dict, out, order, casting, **kwargs)
827 compiled_ex = _numexpr_cache[numexpr_key]
828 except KeyError:
--> 829 compiled_ex = _numexpr_cache[numexpr_key] = NumExpr(ex, signature, **context)
830 kwargs = {'out': out, 'order': order, 'casting': casting,
831 'ex_uses_vml': ex_uses_vml}
~\Anaconda3\lib\site-packages\numexpr\necompiler.py in NumExpr(ex, signature, **kwargs)
624
625 context = getContext(kwargs, frame_depth=1)
--> 626 threeAddrProgram, inputsig, tempsig, constants, input_names = precompile(ex, signature, context)
627 program = compileThreeAddrForm(threeAddrProgram)
628 return interpreter.NumExpr(inputsig.encode('ascii'),
~\Anaconda3\lib\site-packages\numexpr\necompiler.py in precompile(ex, signature, context)
569 ast = ASTNode('op', value='copy', astKind=ex.astKind, children=(ast,))
570
--> 571 ast = typeCompileAst(ast)
572
573 aliases = collapseDuplicateSubtrees(ast)
~\Anaconda3\lib\site-packages\numexpr\necompiler.py in typeCompileAst(ast)
212 raise NotImplementedError(
213 "couldn't find matching opcode for '%s'"
--> 214 % (ast.value + '_' + retsig + basesig))
215 # First just cast constants, then cast variables if necessary:
216 for i, (have, want) in enumerate(zip(basesig, sig)):
NotImplementedError: couldn't find matching opcode for 'or_bdb'
A DataFrame as mask seems to give some errors. You could use mask.values to just grab the underlying array.
The example code below makes the following changes:
the columns of the dataframe are changed from integer to float
the 'cluster' column is left out of the gaussian_filter input and of the mask
the mask is calculated by repeating the mask value of the 'cluster' column across the other columns
the dictionary for the colormaps now uses numbers as keys
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
from scipy.ndimage import gaussian_filter

df = pd.DataFrame(np.random.randint(0, 2, size=(10, 5)).astype(float),
                  columns=['1', '2', '3', '4', '5'])
cluster = [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]
df['cluster'] = cluster
x_axis_labels = range(1, 6)
fig, ax = plt.subplots(figsize=(4, 10))
np_smooth = gaussian_filter(df[df.columns[:-1]], sigma=0.75)
cmaps = {1: 'Reds_r', 2: 'Greys_r', 3: 'Blues_r', 4: 'Greens_r', 5: 'Purples_r', 6: 'Greens_r', 7: 'Blues_r',
         0: 'Greens_r'}
for clus, cmap in cmaps.items():
    mask = np.repeat((df['cluster'] != int(clus)).values.reshape(-1, 1), len(df.columns) - 1, 1)
    if not mask.all():
        sns.heatmap(np_smooth, mask=mask, cmap=cmap, xticklabels=x_axis_labels, yticklabels=False,
                    cbar=False, ax=ax)
plt.tight_layout()
plt.show()
PS: To have consistent color ranges, you might use vmin=np_smooth.min(), vmax=np_smooth.max() in the calls to sns.heatmap().
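With that change, each sns.heatmap() call inside the loop would become:

sns.heatmap(np_smooth, mask=mask, cmap=cmap, vmin=np_smooth.min(), vmax=np_smooth.max(),
            xticklabels=x_axis_labels, yticklabels=False, cbar=False, ax=ax)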

Why Exception Handling doesn't print text?

My question is why Python doesn't execute the print statement in the exception-handling code below. I am trying to calculate the log of the volumes for a group of stocks. Each stock has 1259 volume values, but NumPy generates a RuntimeWarning, "divide by zero encountered in log". So I try to use exception handling to locate where the log input is zero, but Python doesn't execute the print statement under except. The print statement is supposed to print the name of the stock and the index in the array where the volume is zero. Why?
Here is the code:
import numpy as np

for i, stock in enumerate(df.columns):  # df holds one column of volumes per stock
    volumes = df[stock].to_numpy()
    for r in range(len(volumes)):  # len(volumes) = 1259
        try:
            v = np.log(volumes[r])
        except:
            print(stock, r)
Here is the error that follows the RuntimeWarning:
LinAlgError Traceback (most recent call last)
<ipython-input-6-6aa283671e2c> in <module>
13 closes = df_close[stock].to_numpy()
14 volumes = df_vol[stock].to_numpy()
---> 15 indicator_values_all_stocks[i] = indicator.price_volume_fit(volumes, closes, histLength)
16
17 indicator_values_all_stocks_no_NaN = indicator_values_all_stocks[:, ~np.isnan(indicator_values_all_stocks).any(axis=0)]
~\Desktop\Python Projects Organized\Finance\Indicator Statistics\B.57. Price Volume Fit\indicator.py in price_volume_fit(volumes, closes, histLength)
1259 x = log_volumes[i - histLength:i]
1260 y = log_prices[i - histLength:i]
-> 1261 model = np.polyfit(x, y, 1, full = True)
1262 slope[i] = model[0][0]
1263
<__array_function__ internals> in polyfit(*args, **kwargs)
c:\users\donald seger\miniconda3\envs\tensorflow\lib\site-packages\numpy\lib\polynomial.py in polyfit(x, y, deg, rcond, full, w, cov)
629 scale = NX.sqrt((lhs*lhs).sum(axis=0))
630 lhs /= scale
--> 631 c, resids, rank, s = lstsq(lhs, rhs, rcond)
632 c = (c.T/scale).T # broadcast scale coefficients
633
<__array_function__ internals> in lstsq(*args, **kwargs)
c:\users\donald seger\miniconda3\envs\tensorflow\lib\site-packages\numpy\linalg\linalg.py in lstsq(a, b, rcond)
2257 # lapack can't handle n_rhs = 0 - so allocate the array one larger in that axis
2258 b = zeros(b.shape[:-2] + (m, n_rhs + 1), dtype=b.dtype)
-> 2259 x, resids, rank, s = gufunc(a, b, rcond, signature=signature, extobj=extobj)
2260 if m == 0:
2261 x[...] = 0
c:\users\donald seger\miniconda3\envs\tensorflow\lib\site-packages\numpy\linalg\linalg.py in _raise_linalgerror_lstsq(err, flag)
107
108 def _raise_linalgerror_lstsq(err, flag):
--> 109 raise LinAlgError("SVD did not converge in Linear Least Squares")
110
111 def get_linalg_error_extobj(callback):
LinAlgError: SVD did not converge in Linear Least Squares
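As background for anyone hitting the same thing: np.log(0) does not raise a Python exception by default; it emits a RuntimeWarning and returns -inf, so the except branch never runs. A minimal sketch, assuming the same df as above, that turns the warning into a catchable exception:

import numpy as np

with np.errstate(divide='raise'):  # divide-by-zero inside np.log now raises FloatingPointError
    for i, stock in enumerate(df.columns):
        volumes = df[stock].to_numpy()
        for r in range(len(volumes)):
            try:
                v = np.log(volumes[r])
            except FloatingPointError:
                print(stock, r)  # now fires exactly where volumes[r] == 0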

" ArityMismatch: Adding expressions with non-matching form arguments () vs ('v_1',) " using FEniCS

I want to solve a continuum mechanics problem with FEniCS. I apply pressure and take the weight into account, but when I add the thermoelasticity component it no longer works.
Here is my code:
from dolfin import *
from fenics import *
from ufl import nabla_div
from ufl import as_tensor
import matplotlib.pyplot as plt
import numpy as np

E = Constant(100*10**9)
nu = Constant(0.3)
Lg = 0.01; W = 0.2
mu = E/(2+2*nu)
rho = Constant(2200)
delta = W/Lg
gamma = 0.4*delta**2
beta = 8
lambda_ = (E*nu)/((1+nu)*(1-2*nu))
alpha = 1.2*(10**(-8))
deltaT = Constant(50)
Kt = E*alpha*deltaT/(1-2*nu)
g = 9.81
tol = 1E-14

# Create mesh and define function space
mesh = RectangleMesh(Point(-2., 0.), Point(2., 10.), 80, 200)
V = VectorFunctionSpace(mesh, "P", 1)

# Define boundary condition
def clamped_boundary(x, on_boundary):
    return on_boundary and x[1] < tol

class UpFace(SubDomain):
    def inside(self, x, on_boundary):
        return on_boundary and (x[1] > 10 - tol)

ueN = UpFace()
boundaries = MeshFunction("size_t", mesh, mesh.topology().dim()-1, 0)
ueN.mark(boundaries, 1)
ds = Measure("ds", domain=mesh, subdomain_data=boundaries)
bc = DirichletBC(V, Constant((0, 0)), clamped_boundary)

def epsilon(u):
    return 0.5*(nabla_grad(u) + nabla_grad(u).T)

def sigma(u):
    return (lambda_*nabla_div(u) - Kt)*Identity(d) + (2*mu)*epsilon(u)

# Define variational problem
u = TrialFunction(V)
d = u.geometric_dimension()  # space dimension
v = TestFunction(V)
f = Constant((0, -rho*g))
T = Constant((0, 0))
Pr = Constant((0, -2*10**9))
a = inner(sigma(u), epsilon(v))*dx
L = dot(f, v)*dx + dot(T, v)*ds + dot(Pr, v)*ds(1)

# Compute solution
u = Function(V)
solve(a == L, u, bc)

# Plot solution
plot(u, mode="displacement", color="red")
plt.colorbar(plot(u))
I get this error message:
---------------------------------------------------------------------------
ArityMismatch Traceback (most recent call last)
<ipython-input-54-805d7c5b704f> in <module>
17 # Compute solution
18 u = Function(V)
---> 19 solve(a == L, u, bc)
20
21 # Plot solution
/usr/lib/python3/dist-packages/dolfin/fem/solving.py in solve(*args, **kwargs)
218 # tolerance)
219 elif isinstance(args[0], ufl.classes.Equation):
--> 220 _solve_varproblem(*args, **kwargs)
221
222 # Default case, just call the wrapped C++ solve function
/usr/lib/python3/dist-packages/dolfin/fem/solving.py in _solve_varproblem(*args, **kwargs)
240 # Create problem
241 problem = LinearVariationalProblem(eq.lhs, eq.rhs, u, bcs,
--> 242 form_compiler_parameters=form_compiler_parameters)
243
244 # Create solver and call solve
/usr/lib/python3/dist-packages/dolfin/fem/problem.py in __init__(self, a, L, u, bcs, form_compiler_parameters)
54 else:
55 L = Form(L, form_compiler_parameters=form_compiler_parameters)
---> 56 a = Form(a, form_compiler_parameters=form_compiler_parameters)
57
58 # Initialize C++ base class
/usr/lib/python3/dist-packages/dolfin/fem/form.py in __init__(self, form, **kwargs)
42
43 ufc_form = ffc_jit(form, form_compiler_parameters=form_compiler_parameters,
---> 44 mpi_comm=mesh.mpi_comm())
45 ufc_form = cpp.fem.make_ufc_form(ufc_form[0])
46
/usr/lib/python3/dist-packages/dolfin/jit/jit.py in mpi_jit(*args, **kwargs)
45 # Just call JIT compiler when running in serial
46 if MPI.size(mpi_comm) == 1:
---> 47 return local_jit(*args, **kwargs)
48
49 # Default status (0 == ok, 1 == fail)
/usr/lib/python3/dist-packages/dolfin/jit/jit.py in ffc_jit(ufl_form, form_compiler_parameters)
95 p.update(dict(parameters["form_compiler"]))
96 p.update(form_compiler_parameters or {})
---> 97 return ffc.jit(ufl_form, parameters=p)
98
99
/usr/lib/python3/dist-packages/ffc/jitcompiler.py in jit(ufl_object, parameters, indirect)
215
216 # Inspect cache and generate+build if necessary
--> 217 module = jit_build(ufl_object, module_name, parameters)
218
219 # Raise exception on failure to build or import module
/usr/lib/python3/dist-packages/ffc/jitcompiler.py in jit_build(ufl_object, module_name, parameters)
131 name=module_name,
132 params=params,
--> 133 generate=jit_generate)
134 return module
135
/usr/lib/python3/dist-packages/dijitso/jit.py in jit(jitable, name, params, generate, send, receive, wait)
163 elif generate:
164 # 1) Generate source code
--> 165 header, source, dependencies = generate(jitable, name, signature, params["generator"])
166 # Ensure we got unicode from generate
167 header = as_unicode(header)
/usr/lib/python3/dist-packages/ffc/jitcompiler.py in jit_generate(ufl_object, module_name, signature, parameters)
64
65 code_h, code_c, dependent_ufl_objects = compile_object(ufl_object,
---> 66 prefix=module_name, parameters=parameters, jit=True)
67
68 # Jit compile dependent objects separately,
/usr/lib/python3/dist-packages/ffc/compiler.py in compile_form(forms, object_names, prefix, parameters, jit)
141 """This function generates UFC code for a given UFL form or list of UFL forms."""
142 return compile_ufl_objects(forms, "form", object_names,
--> 143 prefix, parameters, jit)
144
145
/usr/lib/python3/dist-packages/ffc/compiler.py in compile_ufl_objects(ufl_objects, kind, object_names, prefix, parameters, jit)
183 # Stage 1: analysis
184 cpu_time = time()
--> 185 analysis = analyze_ufl_objects(ufl_objects, kind, parameters)
186 _print_timing(1, time() - cpu_time)
187
/usr/lib/python3/dist-packages/ffc/analysis.py in analyze_ufl_objects(ufl_objects, kind, parameters)
88 # Analyze forms
89 form_datas = tuple(_analyze_form(form, parameters)
---> 90 for form in forms)
91
92 # Extract unique elements accross all forms
/usr/lib/python3/dist-packages/ffc/analysis.py in <genexpr>(.0)
88 # Analyze forms
89 form_datas = tuple(_analyze_form(form, parameters)
---> 90 for form in forms)
91
92 # Extract unique elements accross all forms
/usr/lib/python3/dist-packages/ffc/analysis.py in _analyze_form(form, parameters)
172 do_apply_geometry_lowering=True,
173 preserve_geometry_types=(Jacobian,),
--> 174 do_apply_restrictions=True)
175 elif r == "tsfc":
176 try:
/usr/lib/python3/dist-packages/ufl/algorithms/compute_form_data.py in compute_form_data(form, do_apply_function_pullbacks, do_apply_integral_scaling, do_apply_geometry_lowering, preserve_geometry_types, do_apply_default_restrictions, do_apply_restrictions, do_estimate_degrees, do_append_everywhere_integrals, complex_mode)
416 preprocessed_form = remove_complex_nodes(preprocessed_form)
417
--> 418 check_form_arity(preprocessed_form, self.original_form.arguments(), complex_mode) # Currently testing how fast this is
419
420 # TODO: This member is used by unit tests, change the tests to
/usr/lib/python3/dist-packages/ufl/algorithms/check_arities.py in check_form_arity(form, arguments, complex_mode)
175 def check_form_arity(form, arguments, complex_mode=False):
176 for itg in form.integrals():
--> 177 check_integrand_arity(itg.integrand(), arguments, complex_mode)
/usr/lib/python3/dist-packages/ufl/algorithms/check_arities.py in check_integrand_arity(expr, arguments, complex_mode)
157 key=lambda x: (x.number(), x.part())))
158 rules = ArityChecker(arguments)
--> 159 arg_tuples = map_expr_dag(rules, expr, compress=False)
160 args = tuple(a[0] for a in arg_tuples)
161 if args != arguments:
/usr/lib/python3/dist-packages/ufl/corealg/map_dag.py in map_expr_dag(function, expression, compress)
35 Return the result of the final function call.
36 """
---> 37 result, = map_expr_dags(function, [expression], compress=compress)
38 return result
39
/usr/lib/python3/dist-packages/ufl/corealg/map_dag.py in map_expr_dags(function, expressions, compress)
84 r = handlers[v._ufl_typecode_](v)
85 else:
---> 86 r = handlers[v._ufl_typecode_](v, *[vcache[u] for u in v.ufl_operands])
87
88 # Optionally check if r is in rcache, a memory optimization
/usr/lib/python3/dist-packages/ufl/algorithms/check_arities.py in sum(self, o, a, b)
46 def sum(self, o, a, b):
47 if a != b:
---> 48 raise ArityMismatch("Adding expressions with non-matching form arguments {0} vs {1}.".format(_afmt(a), _afmt(b)))
49 return a
50
ArityMismatch: Adding expressions with non-matching form arguments () vs ('v_1',).
When I write this instead (removing the Kt from sigma(u)):
def sigma(u):
    return (lambda_*nabla_div(u))*Identity(d) + (2*mu)*epsilon(u)
it works perfectly.
On this page (Click here), they solve the same kind of problem, and it works on my computer. Do you know how to fix it?
I had exactly the same question, and a colleague of mine figured it out for me. Since no answer has been given here, I will try to leave some directions to guide others to the solution. I do not have a lot of expertise yet, so please consider that my use of terminology might be a little off.
The FEniCS error somewhat misled me into thinking the mistake is in the definition of the stress term sigma. It is not exactly there. The right-hand side and the left-hand side passed to the solve function are not split correctly (this is also shown at the very top of the error trace). The term Kt*Identity(d) in the stress function sigma does not depend on the trial function u; it is only multiplied by the test function v later (via epsilon(v)). Therefore it has to go into the L side of the equation given to the solver.
In the link that you shared, the script uses the rhs and lhs functions to correctly split the equation into a and L.
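Applied to the code above, that splitting might look like the following sketch (lhs and rhs come with dolfin's wildcard import; the constant Kt part of sigma, which contains no trial function, is moved to the right-hand side automatically):

# Keep the full thermoelastic stress, build one residual form F,
# then let UFL split it into bilinear and linear parts.
def sigma(u):
    return (lambda_*nabla_div(u) - Kt)*Identity(d) + (2*mu)*epsilon(u)

F = inner(sigma(u), epsilon(v))*dx - dot(f, v)*dx - dot(T, v)*ds - dot(Pr, v)*ds(1)
a, L = lhs(F), rhs(F)  # a keeps the terms with both u and v; L collects the rest

u = Function(V)
solve(a == L, u, bc)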

Python 3; Matplotlib; Box Plot Error

I am new to Python/pandas and am trying to create a boxplot using the iris data set. Here is my code:
import pandas as pd
import matplotlib.pyplot as plt  # needed for plt.boxplot

iris_filename = '/Users/pro/Documents/Code/Data Science/Iris/IRIS.csv'
iris = pd.read_csv(iris_filename, header=None,
                   names=['sepal_lenght', 'sepal_width', 'petal_lenght', 'petal_width', 'target'])
plt.boxplot(iris)
I get this error:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-20-e190e88674b0> in <module>()
----> 1 plt.boxplot(iris)
/anaconda/lib/python3.5/site-packages/matplotlib/pyplot.py in boxplot(x, notch, sym, vert, whis, positions, widths, patch_artist, bootstrap, usermedians, conf_intervals, meanline, showmeans, showcaps, showbox, showfliers, boxprops, labels, flierprops, medianprops, meanprops, capprops, whiskerprops, manage_xticks, autorange, zorder, hold, data)
2784 whiskerprops=whiskerprops,
2785 manage_xticks=manage_xticks, autorange=autorange,
-> 2786 zorder=zorder, data=data)
2787 finally:
2788 ax._hold = washold
/anaconda/lib/python3.5/site-packages/matplotlib/__init__.py in inner(ax, *args, **kwargs)
1890 warnings.warn(msg % (label_namer, func.__name__),
1891 RuntimeWarning, stacklevel=2)
-> 1892 return func(ax, *args, **kwargs)
1893 pre_doc = inner.__doc__
1894 if pre_doc is None:
/anaconda/lib/python3.5/site-packages/matplotlib/axes/_axes.py in boxplot(self, x, notch, sym, vert, whis, positions, widths, patch_artist, bootstrap, usermedians, conf_intervals, meanline, showmeans, showcaps, showbox, showfliers, boxprops, labels, flierprops, medianprops, meanprops, capprops, whiskerprops, manage_xticks, autorange, zorder)
3266 bootstrap = rcParams['boxplot.bootstrap']
3267 bxpstats = cbook.boxplot_stats(x, whis=whis, bootstrap=bootstrap,
-> 3268 labels=labels, autorange=autorange)
3269 if notch is None:
3270 notch = rcParams['boxplot.notch']
/anaconda/lib/python3.5/site-packages/matplotlib/cbook.py in boxplot_stats(X, whis, bootstrap, labels, autorange)
1984
1985 # convert X to a list of lists
-> 1986 X = _reshape_2D(X)
1987
1988 ncols = len(X)
/anaconda/lib/python3.5/site-packages/matplotlib/cbook.py in _reshape_2D(X)
2245 X = [X.ravel()]
2246 else:
-> 2247 X = [X[:, i] for i in xrange(ncols)]
2248 else:
2249 raise ValueError("input `X` must have 2 or fewer dimensions")
/anaconda/lib/python3.5/site-packages/matplotlib/cbook.py in <listcomp>(.0)
2245 X = [X.ravel()]
2246 else:
-> 2247 X = [X[:, i] for i in xrange(ncols)]
2248 else:
2249 raise ValueError("input `X` must have 2 or fewer dimensions")
/anaconda/lib/python3.5/site-packages/pandas/core/frame.py in __getitem__(self, key)
2057 return self._getitem_multilevel(key)
2058 else:
-> 2059 return self._getitem_column(key)
2060
2061 def _getitem_column(self, key):
/anaconda/lib/python3.5/site-packages/pandas/core/frame.py in _getitem_column(self, key)
2064 # get column
2065 if self.columns.is_unique:
-> 2066 return self._get_item_cache(key)
2067
2068 # duplicate columns & possible reduce dimensionality
/anaconda/lib/python3.5/site-packages/pandas/core/generic.py in _get_item_cache(self, item)
1382 """Return the cached item, item represents a label indexer."""
1383 cache = self._item_cache
-> 1384 res = cache.get(item)
1385 if res is None:
1386 values = self._data.get(item)
TypeError: unhashable type: 'slice'
I have searched the web for this and cannot seem to find an answer. I would appreciate any help with this.
You are calling Matplotlib to boxplot the DataFrame iris. Since you are already using pandas to import the .csv, you should also use it for plotting:
iris.boxplot()
See the pandas boxplot API for details.
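For instance, a minimal sketch (assuming the CSV from the question was read so that the measurement columns are numeric; DataFrame.boxplot() draws one box per numeric column and skips non-numeric ones such as 'target'):

import pandas as pd
import matplotlib.pyplot as plt

iris = pd.read_csv(iris_filename, header=None,
                   names=['sepal_lenght', 'sepal_width', 'petal_lenght', 'petal_width', 'target'])
iris.boxplot()  # one box per numeric column
plt.show()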

Using the natural language toolkit in Jupyter notebook

Hello, I am trying to use NLTK to tokenize and generate some POS tags, but I get an error in spite of importing nltk:
import nltk
from bs4 import BeautifulSoup

bs = BeautifulSoup(web.text, 'html.parser')  # web comes from earlier in the notebook (not shown)
print(bs)
tokes = nltk.word_tokenize(bs)
tags = nltk.pos_tag(tokes)
TypeError Traceback (most recent call last)
<ipython-input-71-f1434047d3f5> in <module>()
1 bs=BeautifulSoup(web.text, 'html.parser')
2 print (bs)
----> 3 tokes=nltk.word_tokenize (bs)
4 tags= nltk.pos_tag(tokes)
5 tags
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\__init__.py in word_tokenize(text, language)
104 :param language: the model name in the Punkt corpus
105 """
--> 106 return [token for sent in sent_tokenize(text, language)
107 for token in _treebank_word_tokenize(sent)]
108
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\__init__.py in sent_tokenize(text, language)
89 """
90 tokenizer = load('tokenizers/punkt/{0}.pickle'.format(language))
---> 91 return tokenizer.tokenize(text)
92
93 # Standard word tokenizer.
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\punkt.py in tokenize(self, text, realign_boundaries)
1224 Given a text, returns a list of the sentences in that text.
1225 """
-> 1226 return list(self.sentences_from_text(text, realign_boundaries))
1227
1228 def debug_decisions(self, text):
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\punkt.py in sentences_from_text(self, text, realign_boundaries)
1272 follows the period.
1273 """
-> 1274 return [text[s:e] for s, e in self.span_tokenize(text, realign_boundaries)]
1275
1276 def _slices_from_text(self, text):
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\punkt.py in span_tokenize(self, text, realign_boundaries)
1263 if realign_boundaries:
1264 slices = self._realign_boundaries(text, slices)
-> 1265 return [(sl.start, sl.stop) for sl in slices]
1266
1267 def sentences_from_text(self, text, realign_boundaries=True):
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\punkt.py in <listcomp>(.0)
1263 if realign_boundaries:
1264 slices = self._realign_boundaries(text, slices)
-> 1265 return [(sl.start, sl.stop) for sl in slices]
1266
1267 def sentences_from_text(self, text, realign_boundaries=True):
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\punkt.py in _realign_boundaries(self, text, slices)
1302 """
1303 realign = 0
-> 1304 for sl1, sl2 in _pair_iter(slices):
1305 sl1 = slice(sl1.start + realign, sl1.stop)
1306 if not sl2:
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\punkt.py in _pair_iter(it)
308 """
309 it = iter(it)
--> 310 prev = next(it)
311 for el in it:
312 yield (prev, el)
C:\Users\DESDEJEI\Anaconda3\lib\site-packages\nltk\tokenize\punkt.py in _slices_from_text(self, text)
1276 def _slices_from_text(self, text):
1277 last_break = 0
-> 1278 for match in self._lang_vars.period_context_re().finditer(text):
1279 context = match.group() + match.group('after_tok')
1280 if self.text_contains_sentbreak(context):
TypeError: expected string or bytes-like object
Could anyone help me figure out where exactly I may have gone wrong with my syntax?
You're passing bs to the tokenize function when you should be passing bs.text.
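A minimal sketch of the fix; word_tokenize expects a string, not a BeautifulSoup object (and note the tokenizer and tagger models need a one-time nltk.download):

import nltk
nltk.download('punkt')                       # sentence/word tokenizer model
nltk.download('averaged_perceptron_tagger')  # POS tagger model

tokes = nltk.word_tokenize(bs.text)  # bs.text is the plain text of the parsed page
tags = nltk.pos_tag(tokes)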
