Commit b41df90

Merge pull request #14 from ucl-bug/dependabot/pip/jax-0.2.25
Bump jax from 0.2.21 to 0.2.25
2 parents (cf30b07 + 9583530), commit b41df90

8 files changed: +58 additions, -58 deletions

_setup/requirements.txt
File mode changed from 100755 to 100644
Lines changed: 1 addition & 1 deletion

@@ -1,3 +1,3 @@
-jax==0.2.21
+jax==0.2.25
 jaxlib==0.1.71
 hashids==1.3.1
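
For reference, the new pins can be checked at runtime. A minimal sketch; the asserted strings simply mirror the requirements file above:

import jax
from jaxlib import version as jaxlib_version

# Confirm the environment matches _setup/requirements.txt
assert jax.__version__ == "0.2.25"
assert jaxlib_version.__version__ == "0.1.71"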

docs/notebooks/customizing_discretizations.ipynb
Lines changed: 2 additions & 2 deletions

@@ -525,7 +525,7 @@
 ],
 "source": [
 "from jax import numpy as jnp\n",
-"from jax.experimental import stax\n",
+"from jax.example_libraries import stax\n",
 "\n",
 "init_random_params, predict = stax.serial(\n",
 " stax.Dense(1024), stax.Relu,\n",
@@ -573,7 +573,7 @@
 ],
 "source": [
 "# Reusing the operator with a neural network discretization\n",
-"from jax.experimental import stax\n",
+"from jax.example_libraries import stax\n",
 "from functools import partial\n",
 "from jaxdf.discretization import Arbitrary\n",
 "\n",

docs/notebooks/example_1_paper.ipynb
Lines changed: 1 addition & 1 deletion

@@ -121,7 +121,7 @@
 "from jaxdf.utils import join_dicts\n",
 "from jax import numpy as jnp\n",
 "from jax.scipy.sparse.linalg import gmres\n",
-"from jax.experimental import optimizers\n",
+"from jax.example_libraries import optimizers\n",
 "import jax\n",
 "\n",
 "# Settings\n",

docs/notebooks/helmholtz_pinn.ipynb
Lines changed: 2 additions & 2 deletions

@@ -164,7 +164,7 @@
 "outputs": [],
 "source": [
 "from jaxdf.discretization import Arbitrary, UniformField\n",
-"from jax.experimental import stax\n",
+"from jax.example_libraries import stax\n",
 "from jax import random\n",
 "\n",
 "seed = random.PRNGKey(42)\n",
@@ -354,7 +354,7 @@
 ],
 "source": [
 "# Training loop\n",
-"from jax.experimental import optimizers\n",
+"from jax.example_libraries import optimizers\n",
 "from jax.tree_util import tree_multimap\n",
 "from jax import jit\n",
 "from tqdm import tqdm\n",

docs/notebooks/learn_fd_kernel.ipynb
Lines changed: 1 addition & 1 deletion

@@ -283,7 +283,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from jax.experimental import optimizers\n",
+"from jax.example_libraries import optimizers\n",
 "\n",
 "init_fun, update_fun, get_params = optimizers.adam(.001, b1=0.9, b2=0.9)\n",
 "opt_state = init_fun(jnp.asarray(k))"

docs/notebooks/pinn_burgers.ipynb
Lines changed: 24 additions & 24 deletions
Large diffs are not rendered by default.

docs/notebooks/quickstart.ipynb
Lines changed: 26 additions & 26 deletions
Large diffs are not rendered by default.

docs/notebooks/simulate_helmholtz_equation.ipynb
Lines changed: 1 addition & 1 deletion

@@ -872,7 +872,7 @@
 }
 ],
 "source": [
-"from jax.experimental import optimizers\n",
+"from jax.example_libraries import optimizers\n",
 "from jax import jit\n",
 "from tqdm import tqdm\n",
 "\n",
