[DOCS] Added .ipynb tutorials in docs

This commit is contained in:
Philippe Tillet
2021-03-06 02:57:41 -05:00
parent 0c13b8ff0e
commit 5172792543
6 changed files with 37 additions and 41 deletions

View File

@@ -30,7 +30,7 @@
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autosectionlabel']
extensions = ['nbsphinx', 'sphinx.ext.autosectionlabel']
autosectionlabel_prefix_document = True
# Add any paths that contain templates here, relative to this directory.
@@ -69,7 +69,7 @@ language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**.ipynb_checkpoints']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

View File

@@ -17,5 +17,5 @@ Welcome to Triton's documentation!
:maxdepth: 1
:caption: Tutorials
Vector Addition <https://github.com/ptillet/triton/blob/master/python/tutorials/01-vector-add.ipynb>
Fused Softmax <https://github.com/ptillet/triton/blob/master/python/tutorials/02-fused-softmax.ipynb>
Vector Addition <tutorials/01-vector-add.ipynb>
Fused Softmax <tutorials/02-fused-softmax.ipynb>

View File

@@ -0,0 +1 @@
../../python/tutorials/01-vector-add.ipynb

View File

@@ -0,0 +1 @@
../../python/tutorials/02-fused-softmax.ipynb

View File

@@ -2,10 +2,10 @@
"cells": [
{
"cell_type": "markdown",
"id": "induced-zoning",
"id": "acute-possession",
"metadata": {},
"source": [
"# Getting Started"
"# Vector Addition"
]
},
{
@@ -24,7 +24,7 @@
"id": "identical-conditions",
"metadata": {},
"source": [
"# Writing the Compute Kernel"
"## Writing the Compute Kernel"
]
},
{
@@ -74,7 +74,7 @@
"id": "forbidden-wednesday",
"metadata": {},
"source": [
"# Writing the Torch bindings"
"## Writing the Torch bindings"
]
},
{
@@ -174,7 +174,7 @@
"id": "exclusive-salvation",
"metadata": {},
"source": [
"# Writing a Unit Test"
"## Writing a Unit Test"
]
},
{
@@ -218,7 +218,7 @@
"id": "polished-australia",
"metadata": {},
"source": [
"# Writing a Benchmark"
"## Writing a Benchmark"
]
},
{
@@ -266,7 +266,9 @@
"cell_type": "code",
"execution_count": 15,
"id": "pleasant-valley",
"metadata": {},
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
@@ -301,14 +303,6 @@
"source": [
"Our op is on-par with Torch's vectorized element-wise kernel when the vectors are large enough. One caveat is that the latency of PyTorch is much smaller for small vectors (3us vs 18-20us). This is something we are actively working on to reduce."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "agreed-backing",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {

File diff suppressed because one or more lines are too long