Requirements update (#63)
* Requirements and doc update

* Sort

* Update notebook too
Douwe Osinga authored Jul 22, 2019
1 parent 9130b78 commit 04f56a7
Showing 5 changed files with 173 additions and 78 deletions.
19 changes: 18 additions & 1 deletion 05.1 Generating Text in the Style of an Example Text.ipynb
@@ -1,5 +1,22 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Installing gutenberg is tricky, so it is not included in the requirements.txt\n",
"# For it to work we need a berkelydb version <= 6 for licensing reasons. On OSX\n",
"# using brew you can do:\n",
"# brew install berkeley-db@4\n",
"\n",
"!pip install gutenberg\n",
"\n",
"# If this doesn't work, this notebook should still run, just not fetching data\n",
"# from gutenberg."
]
},
{
"cell_type": "code",
"execution_count": 14,
@@ -1633,7 +1650,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
"version": "3.7.3"
}
},
"nbformat": 4,
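The new first cell of 05.1 promises that the notebook still runs when the gutenberg install fails. A minimal sketch of such a fallback, assuming the rest of the notebook only needs Gutenberg as a source of example text (the function name and fallback path are illustrative, not taken from the notebook):

```python
# Illustrative sketch, not part of the commit: guard the gutenberg import so the
# notebook keeps working when `pip install gutenberg` fails.
try:
    import gutenberg  # noqa: F401 -- only used to probe availability
    GUTENBERG_AVAILABLE = True
except ImportError:
    GUTENBERG_AVAILABLE = False

def load_training_text(etext_id=100, fallback_path="data/example.txt"):
    """Fetch a Project Gutenberg text if possible, else read a bundled file."""
    if GUTENBERG_AVAILABLE:
        from gutenberg.acquire import load_etext
        from gutenberg.cleanup import strip_headers
        return strip_headers(load_etext(etext_id)).strip()
    with open(fallback_path, encoding="utf-8") as f:
        return f.read()
```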
71 changes: 70 additions & 1 deletion 08.2 Import Gutenberg.ipynb
@@ -1,5 +1,74 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Collecting gutenberg\n",
" Using cached https://files.pythonhosted.org/packages/14/b1/6e99867c38e70d46366966a0a861c580377f38312cf9dbad38b82ed1823d/Gutenberg-0.7.0.tar.gz\n",
"Collecting bsddb3>=6.1.0 (from gutenberg)\n",
" Using cached https://files.pythonhosted.org/packages/e9/fc/ebfbd4de236b493f9ece156f816c21df0ae87ccc22604c5f9b664efef1b9/bsddb3-6.2.6.tar.gz\n",
"Collecting future>=0.15.2 (from gutenberg)\n",
"Collecting rdflib-sqlalchemy>=0.3.8 (from gutenberg)\n",
" Using cached https://files.pythonhosted.org/packages/92/a2/bc580a51ac1f9680aa04da4b6e96d499903d6e606d2f78f02e73527799da/rdflib_sqlalchemy-0.3.8-py3-none-any.whl\n",
"Collecting rdflib>=4.2.0 (from gutenberg)\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/3c/fe/630bacb652680f6d481b9febbb3e2c3869194a1a5fc3401a4a41195a2f8f/rdflib-4.2.2-py3-none-any.whl (344kB)\n",
"\u001b[K |████████████████████████████████| 348kB 4.0MB/s eta 0:00:01\n",
"\u001b[?25hRequirement already satisfied: requests>=2.5.1 in ./venv3/lib/python3.7/site-packages (from gutenberg) (2.22.0)\n",
"Requirement already satisfied: setuptools>=18.5 in ./venv3/lib/python3.7/site-packages (from gutenberg) (41.0.1)\n",
"Requirement already satisfied: six>=1.10.0 in ./venv3/lib/python3.7/site-packages (from gutenberg) (1.12.0)\n",
"Collecting SQLAlchemy>=1.1.4 (from rdflib-sqlalchemy>=0.3.8->gutenberg)\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/55/98/56b7155bab287cd0c78dee26258835db36e91f2efef41f125ed6f6f1f334/SQLAlchemy-1.3.6.tar.gz (5.9MB)\n",
"\u001b[K |████████████████████████████████| 5.9MB 22.9MB/s eta 0:00:01\n",
"\u001b[?25hCollecting alembic>=0.8.8 (from rdflib-sqlalchemy>=0.3.8->gutenberg)\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/7b/8b/0c98c378d93165d9809193f274c3c6e2151120d955b752419c7d43e4d857/alembic-1.0.11.tar.gz (1.0MB)\n",
"\u001b[K |████████████████████████████████| 1.0MB 27.3MB/s eta 0:00:01\n",
"\u001b[?25hRequirement already satisfied: pyparsing in ./venv3/lib/python3.7/site-packages (from rdflib>=4.2.0->gutenberg) (2.4.1)\n",
"Collecting isodate (from rdflib>=4.2.0->gutenberg)\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/9b/9f/b36f7774ff5ea8e428fdcfc4bb332c39ee5b9362ddd3d40d9516a55221b2/isodate-0.6.0-py2.py3-none-any.whl (45kB)\n",
"\u001b[K |████████████████████████████████| 51kB 8.3MB/s eta 0:00:01\n",
"\u001b[?25hRequirement already satisfied: chardet<3.1.0,>=3.0.2 in ./venv3/lib/python3.7/site-packages (from requests>=2.5.1->gutenberg) (3.0.4)\n",
"Requirement already satisfied: certifi>=2017.4.17 in ./venv3/lib/python3.7/site-packages (from requests>=2.5.1->gutenberg) (2019.6.16)\n",
"Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in ./venv3/lib/python3.7/site-packages (from requests>=2.5.1->gutenberg) (1.25.3)\n",
"Requirement already satisfied: idna<2.9,>=2.5 in ./venv3/lib/python3.7/site-packages (from requests>=2.5.1->gutenberg) (2.8)\n",
"Collecting Mako (from alembic>=0.8.8->rdflib-sqlalchemy>=0.3.8->gutenberg)\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/1b/a5/023aba3d69aacef6bfc13797bdc3dd03c6fb4ae2dcd2fde7dffc37233924/Mako-1.0.14.tar.gz (462kB)\n",
"\u001b[K |████████████████████████████████| 471kB 10.3MB/s eta 0:00:01\n",
"\u001b[?25hCollecting python-editor>=0.3 (from alembic>=0.8.8->rdflib-sqlalchemy>=0.3.8->gutenberg)\n",
" Downloading https://files.pythonhosted.org/packages/c6/d3/201fc3abe391bbae6606e6f1d598c15d367033332bd54352b12f35513717/python_editor-1.0.4-py3-none-any.whl\n",
"Requirement already satisfied: python-dateutil in ./venv3/lib/python3.7/site-packages (from alembic>=0.8.8->rdflib-sqlalchemy>=0.3.8->gutenberg) (2.8.0)\n",
"Requirement already satisfied: MarkupSafe>=0.9.2 in ./venv3/lib/python3.7/site-packages (from Mako->alembic>=0.8.8->rdflib-sqlalchemy>=0.3.8->gutenberg) (1.1.1)\n",
"Building wheels for collected packages: gutenberg, bsddb3, SQLAlchemy, alembic, Mako\n",
" Building wheel for gutenberg (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25h Stored in directory: /Users/douwe/Library/Caches/pip/wheels/8e/cd/75/4bc6f16541a1b7a69b02168da567695b2271c23ac4a0a0a453\n",
" Building wheel for bsddb3 (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25h Stored in directory: /Users/douwe/Library/Caches/pip/wheels/11/b8/b3/fa84db10bf8c563e4ba1a72837a0946d123f12adb34b164bf5\n",
" Building wheel for SQLAlchemy (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25h Stored in directory: /Users/douwe/Library/Caches/pip/wheels/f2/ec/e0/d7deb0c981557e373edf7370574b7001690892afe5fea30c3c\n",
" Building wheel for alembic (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25h Stored in directory: /Users/douwe/Library/Caches/pip/wheels/8b/65/b2/9837b4422d13e739c3324c428f1b3aa9e3c3df666bb420e4b3\n",
" Building wheel for Mako (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25h Stored in directory: /Users/douwe/Library/Caches/pip/wheels/36/0e/1d/4b28d9bc3f835432132648d6d1d038a7b3d9dbfd7df453e0f1\n",
"Successfully built gutenberg bsddb3 SQLAlchemy alembic Mako\n",
"Installing collected packages: bsddb3, future, SQLAlchemy, isodate, rdflib, Mako, python-editor, alembic, rdflib-sqlalchemy, gutenberg\n",
"Successfully installed Mako-1.0.14 SQLAlchemy-1.3.6 alembic-1.0.11 bsddb3-6.2.6 future-0.17.1 gutenberg-0.7.0 isodate-0.6.0 python-editor-1.0.4 rdflib-4.2.2 rdflib-sqlalchemy-0.3.8\n"
]
}
],
"source": [
"# Installing gutenberg is tricky, so it is not included in the requirements.txt\n",
"# For it to work we need a berkelydb version <= 6 for licensing reasons. On OSX\n",
"# using brew you can do:\n",
"# brew install berkeley-db@4\n",
"\n",
"!pip install gutenberg"
]
},
{
"cell_type": "code",
"execution_count": 16,
@@ -293,7 +362,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
"version": "3.7.3"
}
},
"nbformat": 4,
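For context on what the install cell added above enables: once `pip install gutenberg` and its berkeley-db dependency are in place, fetching and cleaning a book looks roughly like this. The etext number is an illustrative choice, not taken from the notebook.

```python
# Rough usage sketch for the gutenberg package installed above (illustrative):
from gutenberg.acquire import load_etext
from gutenberg.cleanup import strip_headers

raw = load_etext(2701)             # download the raw text of etext 2701 (Moby Dick)
text = strip_headers(raw).strip()  # strip the Project Gutenberg license boilerplate
print(text[:300])
```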
17 changes: 15 additions & 2 deletions README.md
@@ -1,6 +1,6 @@
# Deep Learning Cookbook Notebooks

This repository contains 36 python notebooks demonstrating most of the key
This repository contains 35 python notebooks demonstrating most of the key
machine learning techniques in Keras. The notebooks accompany the book
[Deep Learning Cookbook](https://www.amazon.com/Deep-Learning-Cookbook-Practical-Recipes) but work well on their own. A GPU is not required to run them,
but on a mere CPU things will take quite a while.
@@ -78,9 +78,22 @@ up next to each other; effectively doing what Word2Vec does for words, but now f

#### [08.1 Sequence to sequence mapping](https://github.com/DOsinga/deep_learning_cookbook/blob/master/08.1%20Sequence%20to%20sequence%20mapping.ipynb)
#### [08.2 Import Gutenberg](https://github.com/DOsinga/deep_learning_cookbook/blob/master/08.2%20Import%20Gutenberg.ipynb)
#### [08.3 Subword tokenizing](https://github.com/DOsinga/deep_learning_cookbook/blob/master/08.3%20Subword%20tokenizing.ipynb)

Small notebook demonstrating how to download books from Project Gutenberg. Tokenizes a set of books in preparation for
the subword tokenizing in the next notebook.

#### [09.1 Reusing a pretrained image recognition network](https://github.com/DOsinga/deep_learning_cookbook/blob/master/09.1%20Reusing%20a%20pretrained%20image%20recognition%20network.ipynb)

Quick notebook demonstrating how to load a pretrained network and apply it to an image of, well, what else? A cat.
Shows how to normalize the image and decode the predictions.
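As a rough illustration of the 09.1 description above: which pretrained model the notebook actually uses is not shown in this diff, so ResNet50 and the file name cat.jpg below are assumptions.

```python
# Illustrative sketch of loading a pretrained network and decoding predictions;
# ResNet50 and "cat.jpg" are assumptions, not taken from the notebook.
import numpy as np
from keras.applications.resnet50 import ResNet50, preprocess_input, decode_predictions
from keras.preprocessing import image

model = ResNet50(weights="imagenet")
img = image.load_img("cat.jpg", target_size=(224, 224))
x = preprocess_input(np.expand_dims(image.img_to_array(img), axis=0))  # normalize
preds = model.predict(x)
print(decode_predictions(preds, top=3)[0])  # top-3 ImageNet labels with scores
```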

#### [09.2 Images as embeddings](https://github.com/DOsinga/deep_learning_cookbook/blob/master/09.2%20Images%20as%20embeddings.ipynb)

In this notebook we use the Flickr API to fetch a feed of search results for the search term 'cat'. By running each result through a pretrained network
we get vectors that project the images into a 'space'. The center of that space in some sense represents the most cat-like image possible. By reranking on
distance to that center we can weed out images that are less cat-like. Effectively we can improve upon the Flickr search results without
any explicit knowledge of the content!
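The reranking idea described here boils down to a few lines of numpy: embed the images, take the centroid of the embeddings, and sort by distance to it. A minimal sketch with random data standing in for the real image vectors:

```python
# Illustrative sketch of the centroid reranking described for 09.2; the data is
# random and stands in for embeddings produced by a pretrained network.
import numpy as np

def rerank_by_centroid(embeddings):
    """embeddings: (n_images, dim) array; returns indices, most typical image first."""
    centroid = embeddings.mean(axis=0)                         # the "most cat" point
    distances = np.linalg.norm(embeddings - centroid, axis=1)  # distance to the center
    return np.argsort(distances)

fake_embeddings = np.random.rand(20, 2048)   # stand-in for Flickr result vectors
order = rerank_by_centroid(fake_embeddings)  # images far from the centroid rank last
```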

#### [09.3 Retraining](https://github.com/DOsinga/deep_learning_cookbook/blob/master/09.3%20Retraining.ipynb)
#### [10.1 Building an inverse image search service](https://github.com/DOsinga/deep_learning_cookbook/blob/master/10.1%20Building%20an%20inverse%20image%20search%20service.ipynb)
#### [11.1 Detecting Multiple Images](https://github.com/DOsinga/deep_learning_cookbook/blob/master/11.1%20Detecting%20Multiple%20Images.ipynb)
5 changes: 2 additions & 3 deletions requirements.in
@@ -1,9 +1,9 @@
Pillow
bs4
emoji
flickrapi
gensim
geopandas
gutenberg
h5py
imageio
inflect
@@ -16,13 +16,12 @@ matplotlib
mwparserfromhell
nltk
notebook
Pillow
plyvel
psycopg2-binary
pyyaml
sklearn
spotipy
svglib
tensorflow
tensorflow==1.14
tqdm
twitter
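requirements.in now pins tensorflow to 1.14 instead of floating to the latest release. A quick, illustrative way to check that an existing environment matches the pin:

```python
# Illustrative sanity check that the environment matches the tensorflow==1.14 pin.
import tensorflow as tf

assert tf.__version__.startswith("1.14"), (
    "Expected TensorFlow 1.14.x per requirements.in, got " + tf.__version__
)
print("TensorFlow", tf.__version__)
```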
139 changes: 68 additions & 71 deletions requirements.txt
@@ -4,22 +4,22 @@
#
# pip-compile
#
absl-py==0.7.1 # via tensorboard, tensorflow, tensorflow-estimator
alembic==1.0.10 # via rdflib-sqlalchemy
absl-py==0.7.1 # via tensorboard, tensorflow
appnope==0.1.0 # via ipython
args==0.1.0 # via clint
astor==0.7.1 # via tensorflow
astor==0.8.0 # via tensorflow
attrs==19.1.0 # via fiona, jsonschema
audioread==2.1.7 # via librosa
audioread==2.1.8 # via librosa
backcall==0.1.0 # via ipython
backports.csv==1.0.7 # via internetarchive
beautifulsoup4==4.7.1 # via bs4
beautifulsoup4==4.8.0 # via bs4
bleach==3.1.0 # via nbconvert
boto3==1.9.151 # via smart-open
boto3==1.9.192 # via smart-open
boto==2.49.0 # via smart-open
botocore==1.12.151 # via boto3, s3transfer
botocore==1.12.192 # via boto3, s3transfer
bs4==0.0.1
bsddb3==6.2.6 # via gutenberg
certifi==2019.3.9 # via requests
certifi==2019.6.16 # via requests
cffi==1.12.3 # via soundfile
chardet==3.0.4 # via requests
click-plugins==1.1.1 # via fiona
click==7.0 # via click-plugins, cligj, fiona
@@ -36,113 +36,110 @@ emoji==0.5.2
entrypoints==0.3 # via nbconvert
fiona==1.8.6 # via geopandas
flickrapi==2.4.0
future==0.17.1 # via gutenberg
gast==0.2.2 # via tensorflow
gensim==3.7.3
geopandas==0.5.0
grpcio==1.20.1 # via tensorboard, tensorflow
gutenberg==0.7.0
gensim==3.8.0
geopandas==0.5.1
google-pasta==0.1.7 # via tensorflow
grpcio==1.22.0 # via tensorboard, tensorflow
h5py==2.9.0
idna==2.8 # via requests
imageio==2.5.0
inflect==2.1.0
internetarchive==1.8.4
internetarchive==1.8.5
ipykernel==5.1.1 # via ipywidgets, jupyter, jupyter-console, notebook, qtconsole
ipython-genutils==0.2.0 # via nbformat, notebook, qtconsole, traitlets
ipython==7.5.0
ipywidgets==7.4.2 # via jupyter
isodate==0.6.0 # via rdflib
jedi==0.13.3 # via ipython
ipython==7.6.1
ipywidgets==7.5.0 # via jupyter
jedi==0.14.1 # via ipython
jinja2==2.10.1 # via nbconvert, notebook
jmespath==0.9.4 # via boto3, botocore
joblib==0.13.2 # via librosa, scikit-learn
jsonpatch==1.23 # via internetarchive
jsonpointer==2.0 # via jsonpatch
jsonschema==3.0.1 # via nbformat
jupyter-client==5.2.4 # via ipykernel, jupyter-console, notebook, qtconsole
jupyter-client==5.3.1 # via ipykernel, jupyter-console, notebook, qtconsole
jupyter-console==6.0.0 # via jupyter
jupyter-core==4.4.0 # via jupyter-client, nbconvert, nbformat, notebook, qtconsole
jupyter-core==4.5.0 # via jupyter-client, nbconvert, nbformat, notebook, qtconsole
jupyter==1.0.0
keras-applications==1.0.7 # via keras, tensorflow
keras-preprocessing==1.0.9 # via keras, tensorflow
keras-applications==1.0.8 # via keras, tensorflow
keras-preprocessing==1.1.0 # via keras, tensorflow
keras==2.2.4
kiwisolver==1.1.0 # via matplotlib
librosa==0.6.3
llvmlite==0.28.0 # via numba
lxml==4.3.3 # via svglib
mako==1.0.10 # via alembic
markdown==3.1 # via tensorboard
markupsafe==1.1.1 # via jinja2, mako
matplotlib==3.0.3
librosa==0.7.0
llvmlite==0.29.0 # via numba
lxml==4.3.4 # via svglib
markdown==3.1.1 # via tensorboard
markupsafe==1.1.1 # via jinja2
matplotlib==3.1.1
mistune==0.8.4 # via nbconvert
mock==3.0.5 # via tensorflow-estimator
munch==2.3.2 # via fiona
mwparserfromhell==0.5.4
nbconvert==5.5.0 # via jupyter, notebook
nbformat==4.4.0 # via ipywidgets, nbconvert, notebook
nltk==3.4.1
notebook==5.7.8
numba==0.43.1 # via librosa, resampy
numpy==1.16.3 # via gensim, h5py, imageio, keras, keras-applications, keras-preprocessing, librosa, matplotlib, numba, pandas, resampy, scikit-learn, scipy, tensorboard, tensorflow, tensorflow-estimator
oauthlib==3.0.1 # via requests-oauthlib
pandas==0.24.2 # via geopandas
nltk==3.4.4
notebook==6.0.0
numba==0.45.0 # via librosa, resampy
numpy==1.16.4 # via gensim, h5py, imageio, keras, keras-applications, keras-preprocessing, librosa, matplotlib, numba, pandas, resampy, scikit-learn, scipy, tensorboard, tensorflow
oauthlib==3.0.2 # via requests-oauthlib
pandas==0.25.0 # via geopandas
pandocfilters==1.4.2 # via nbconvert
parso==0.4.0 # via jedi
parso==0.5.1 # via jedi
pexpect==4.7.0 # via ipython
pickleshare==0.7.5 # via ipython
pillow==6.0.0
pillow==6.1.0
plyvel==1.1.0
prometheus-client==0.6.0 # via notebook
prometheus-client==0.7.1 # via notebook
prompt-toolkit==2.0.9 # via ipython, jupyter-console
protobuf==3.7.1 # via tensorboard, tensorflow
psycopg2-binary==2.8.2
protobuf==3.9.0 # via tensorboard, tensorflow
psycopg2-binary==2.8.3
ptyprocess==0.6.0 # via pexpect, terminado
pygments==2.4.0 # via ipython, jupyter-console, nbconvert, qtconsole
pyparsing==2.4.0 # via matplotlib, rdflib
pyproj==2.1.3 # via geopandas
pyrsistent==0.15.2 # via jsonschema
python-dateutil==2.8.0 # via alembic, botocore, jupyter-client, matplotlib, pandas
python-editor==1.0.4 # via alembic
pycparser==2.19 # via cffi
pygments==2.4.2 # via ipython, jupyter-console, nbconvert, qtconsole
pyparsing==2.4.1 # via matplotlib
pyproj==2.2.1 # via geopandas
pyrsistent==0.15.3 # via jsonschema
python-dateutil==2.8.0 # via botocore, jupyter-client, matplotlib, pandas
pytz==2019.1 # via pandas
pyyaml==5.1
pyzmq==18.0.1 # via jupyter-client, notebook
qtconsole==4.4.4 # via jupyter
rdflib-sqlalchemy==0.3.8 # via gutenberg
rdflib==4.2.2 # via gutenberg, rdflib-sqlalchemy
reportlab==3.5.21 # via svglib
pyyaml==5.1.1
pyzmq==18.0.2 # via jupyter-client, notebook
qtconsole==4.5.1 # via jupyter
reportlab==3.5.23 # via svglib
requests-oauthlib==1.2.0 # via flickrapi
requests-toolbelt==0.9.1 # via flickrapi
requests==2.22.0 # via flickrapi, gutenberg, internetarchive, requests-oauthlib, requests-toolbelt, smart-open, spotipy
requests==2.22.0 # via flickrapi, internetarchive, requests-oauthlib, requests-toolbelt, smart-open, spotipy
resampy==0.2.1 # via librosa
s3transfer==0.2.0 # via boto3
s3transfer==0.2.1 # via boto3
schema==0.7.0 # via internetarchive
scikit-learn==0.21.1 # via librosa, sklearn
scikit-learn==0.21.2 # via librosa, sklearn
scipy==1.3.0 # via gensim, keras, librosa, resampy, scikit-learn
send2trash==1.5.0 # via notebook
shapely==1.6.4.post2 # via geopandas
singledispatch==3.4.0.3 # via nltk
six==1.12.0 # via absl-py, bleach, cycler, fiona, flickrapi, gensim, grpcio, gutenberg, h5py, internetarchive, isodate, jsonschema, keras, keras-preprocessing, librosa, mock, munch, nltk, prompt-toolkit, protobuf, pyrsistent, python-dateutil, rdflib-sqlalchemy, resampy, singledispatch, tensorboard, tensorflow, tensorflow-estimator, traitlets
six==1.12.0 # via absl-py, bleach, cycler, fiona, flickrapi, gensim, grpcio, h5py, internetarchive, jsonschema, keras, keras-preprocessing, librosa, munch, nltk, prompt-toolkit, protobuf, pyrsistent, python-dateutil, resampy, tensorboard, tensorflow, traitlets
sklearn==0.0
smart-open==1.8.3 # via gensim
soupsieve==1.9.1 # via beautifulsoup4
smart-open==1.8.4 # via gensim
soundfile==0.10.2 # via librosa
soupsieve==1.9.2 # via beautifulsoup4
spotipy==2.4.4
sqlalchemy==1.3.3 # via alembic, rdflib-sqlalchemy
svglib==0.9.0
tensorboard==1.13.1 # via tensorflow
tensorflow-estimator==1.13.0 # via tensorflow
tensorflow==1.13.1
svglib==0.9.2
tensorboard==1.14.0 # via tensorflow
tensorflow-estimator==1.14.0 # via tensorflow
tensorflow==1.14
termcolor==1.1.0 # via tensorflow
terminado==0.8.2 # via notebook
testpath==0.4.2 # via nbconvert
tinycss2==1.0.2 # via cssselect2, svglib
tornado==6.0.2 # via ipykernel, jupyter-client, notebook, terminado
tornado==6.0.3 # via ipykernel, jupyter-client, notebook, terminado
total-ordering==0.1.0 # via internetarchive
tqdm==4.32.1
tqdm==4.32.2
traitlets==4.3.2 # via ipykernel, ipython, ipywidgets, jupyter-client, jupyter-core, nbconvert, nbformat, notebook, qtconsole
twitter==1.18.0
urllib3==1.24.3 # via botocore, requests
urllib3==1.25.3 # via botocore, requests
wcwidth==0.1.7 # via prompt-toolkit
webencodings==0.5.1 # via bleach, tinycss2
werkzeug==0.15.4 # via tensorboard
werkzeug==0.15.5 # via tensorboard
wheel==0.33.4 # via tensorboard, tensorflow
widgetsnbextension==3.4.2 # via ipywidgets
widgetsnbextension==3.5.0 # via ipywidgets
wrapt==1.11.2 # via tensorflow

# The following packages are considered to be unsafe in a requirements file:
# setuptools==41.0.1 # via ipython, jsonschema, kiwisolver, markdown, protobuf, tensorboard, tinycss2
