4 changes: 3 additions & 1 deletion .gitignore
@@ -5,6 +5,8 @@
__pycache__
books
project
*.pyc
pyexamples/*.pdf

# PyInstaller
# Usually these files are written by a python script from a template
@@ -82,4 +84,4 @@ venv.bak/
/site

# mypy
.mypy_cache/
.mypy_cache/
10 changes: 10 additions & 0 deletions README.md
@@ -98,4 +98,14 @@ Now, run the program as follows:
bash ../tikzmake.sh my_arch


## PyTorch Support
Define a feed-forward `torch.nn.Sequential` module, parse it with `pycore.torchparse.TorchArchParser`, and then generate the `.tex` file.
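
For instance, a minimal sketch of this workflow (a trimmed-down variant of `pyexamples/test_torch_mlp.py`; the output filename `my_mlp.tex` is only illustrative):

```python
import torch as th

from pycore.torchparse import TorchArchParser
from pycore.tikzeng import to_generate


class MLP(th.nn.Module):
    def __init__(self):
        super().__init__()
        # feed-forward stack wrapped in a Sequential, as in pyexamples/test_torch_mlp.py
        self.net = th.nn.Sequential(
            th.nn.Linear(2, 16),
            th.nn.ReLU(),
            th.nn.Linear(16, 1),
        )

    def forward(self, x):
        return self.net(x)


# parse the module and write the TikZ description to a .tex file
parser = TorchArchParser(torch_module=MLP(), input_size=(1, 2))
to_generate(parser.get_arch(), pathname="./my_mlp.tex")
```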

Have a look at the full example provided in `pyexamples/test_torch_mlp.py` and run it via:

```bash
cd pyexamples
bash ../tikzmake.sh test_torch_mlp
```

The generated `.tex` and `.pdf` output can be found in `examples/FeedForward/`.
Binary file added examples/FeedForward/test_torch_mlp.pdf
Binary file not shown.
94 changes: 94 additions & 0 deletions examples/FeedForward/test_torch_mlp.tex
@@ -0,0 +1,94 @@

\documentclass[border=8pt, multi, tikz]{standalone}
\usepackage{import}
\subimport{../layers/}{init}
\usetikzlibrary{positioning}
\usetikzlibrary{3d} %for including external image

\def\ConvColor{rgb:yellow,5;red,2.5;white,5}
\def\ConvReluColor{rgb:yellow,5;red,5;white,5}
\def\PoolColor{rgb:red,1;black,0.3}
\def\UnpoolColor{rgb:blue,2;green,1;black,0.3}
\def\FcColor{rgb:blue,5;red,2.5;white,5}
\def\FcReluColor{rgb:blue,5;red,5;white,4}
\def\SoftmaxColor{rgb:magenta,5;black,7}
\def\SumColor{rgb:blue,5;green,15}

\newcommand{\copymidarrow}{\tikz \draw[-Stealth,line width=0.8mm,draw={rgb:blue,4;red,1;green,1;black,3}] (-0.3,0) -- ++(0.3,0);}

\begin{document}
\begin{tikzpicture}
\tikzstyle{connection}=[ultra thick,every node/.style={sloped,allow upside down},draw=\edgecolor,opacity=0.7]
\tikzstyle{copyconnection}=[ultra thick,every node/.style={sloped,allow upside down},draw={rgb:blue,4;red,1;green,1;black,3},opacity=0.7]

\pic[shift={(1, 0, 0)}] at (0, 0, 0)
{Box={
name=module1,
caption=$\mathrm{{FC}}$,
xlabel={{16, }},
zlabel=,
fill=\FcColor,
height=16,
width=1,
depth=1
}
};

\pic[shift={(0.5, 0, 0)}] at (module1-east)
{Box={
name=module2,
caption=$\varphi_\mathrm{{ReLU}}$,
xlabel={{, }},
zlabel=,
fill=\ConvColor,
height=16,
width=0.5,
depth=1
}
};

\pic[shift={(1, 0, 0)}] at (module2-east)
{Box={
name=module3,
caption=$\mathrm{{FC}}$,
xlabel={{16, }},
zlabel=,
fill=\FcColor,
height=16,
width=1,
depth=1
}
};

\draw [connection] (module2-east) -- node {\midarrow} (module3-west);

\pic[shift={(0.5, 0, 0)}] at (module3-east)
{Box={
name=module4,
caption=$\varphi_\mathrm{{ReLU}}$,
xlabel={{, }},
zlabel=,
fill=\ConvColor,
height=16,
width=0.5,
depth=1
}
};

\pic[shift={(1, 0, 0)}] at (module4-east)
{Box={
name=module5,
caption=$\mathrm{{FC}}$,
xlabel={{1, }},
zlabel=,
fill=\FcColor,
height=1,
width=1,
depth=1
}
};

\draw [connection] (module4-east) -- node {\midarrow} (module5-west);

\end{tikzpicture}
\end{document}
Binary file removed pycore/__init__.pyc
Binary file not shown.
35 changes: 16 additions & 19 deletions pycore/tikzeng.py
@@ -4,11 +4,11 @@
def to_head( projectpath ):
pathlayers = os.path.join( projectpath, 'layers/' ).replace('\\', '/')
return r"""
\documentclass[border=8pt, multi, tikz]{standalone}
\documentclass[border=8pt, multi, tikz]{standalone}
\usepackage{import}
\subimport{"""+ pathlayers + r"""}{init}
\usetikzlibrary{positioning}
\usetikzlibrary{3d} %for including external image
\usetikzlibrary{3d} %for including external image
"""

def to_cor():
@@ -19,7 +19,7 @@ def to_cor():
\def\UnpoolColor{rgb:blue,2;green,1;black,0.3}
\def\FcColor{rgb:blue,5;red,2.5;white,5}
\def\FcReluColor{rgb:blue,5;red,5;white,4}
\def\SoftmaxColor{rgb:magenta,5;black,7}
\def\SoftmaxColor{rgb:magenta,5;black,7}
\def\SumColor{rgb:blue,5;green,15}
"""

@@ -41,15 +41,15 @@ def to_input( pathfile, to='(-3,0,0)', width=8, height=8, name="temp" ):
"""

# Conv
def to_Conv( name, s_filer=256, n_filer=64, offset="(0,0,0)", to="(0,0,0)", width=1, height=40, depth=40, caption=" " ):
def to_Conv( name, s_filer=256, n_filer=64, offset="(0,0,0)", to="(0,0,0)", width=1, height=40, depth=40, fill_color="\ConvColor", caption=" " ):
return r"""
\pic[shift={"""+ offset +"""}] at """+ to +"""
\pic[shift={"""+ offset +"""}] at """+ to +"""
{Box={
name=""" + name +""",
caption="""+ caption +r""",
xlabel={{"""+ str(n_filer) +""", }},
zlabel="""+ str(s_filer) +""",
fill=\ConvColor,
fill=""" + str(fill_color) +""",
height="""+ str(height) +""",
width="""+ str(width) +""",
depth="""+ str(depth) +"""
@@ -61,7 +61,7 @@ def to_Conv( name, s_filer=256, n_filer=64, offset="(0,0,0)", to="(0,0,0)", widt
# Bottleneck
def to_ConvConvRelu( name, s_filer=256, n_filer=(64,64), offset="(0,0,0)", to="(0,0,0)", width=(2,2), height=40, depth=40, caption=" " ):
return r"""
\pic[shift={ """+ offset +""" }] at """+ to +"""
\pic[shift={ """+ offset +""" }] at """+ to +"""
{RightBandedBox={
name="""+ name +""",
caption="""+ caption +""",
@@ -81,7 +81,7 @@ def to_ConvConvRelu( name, s_filer=256, n_filer=(64,64), offset="(0,0,0)", to="(
# Pool
def to_Pool(name, offset="(0,0,0)", to="(0,0,0)", width=1, height=32, depth=32, opacity=0.5, caption=" "):
return r"""
\pic[shift={ """+ offset +""" }] at """+ to +"""
\pic[shift={ """+ offset +""" }] at """+ to +"""
{Box={
name="""+name+""",
caption="""+ caption +r""",
@@ -94,10 +94,10 @@ def to_Pool(name, offset="(0,0,0)", to="(0,0,0)", width=1, height=32, depth=32,
};
"""

# unpool4,
# unpool4,
def to_UnPool(name, offset="(0,0,0)", to="(0,0,0)", width=1, height=32, depth=32, opacity=0.5, caption=" "):
return r"""
\pic[shift={ """+ offset +""" }] at """+ to +"""
\pic[shift={ """+ offset +""" }] at """+ to +"""
{Box={
name="""+ name +r""",
caption="""+ caption +r""",
@@ -114,7 +114,7 @@ def to_UnPool(name, offset="(0,0,0)", to="(0,0,0)", width=1, height=32, depth=32

def to_ConvRes( name, s_filer=256, n_filer=64, offset="(0,0,0)", to="(0,0,0)", width=6, height=40, depth=40, opacity=0.2, caption=" " ):
return r"""
\pic[shift={ """+ offset +""" }] at """+ to +"""
\pic[shift={ """+ offset +""" }] at """+ to +"""
{RightBandedBox={
name="""+ name + """,
caption="""+ caption + """,
@@ -134,7 +134,7 @@ def to_ConvRes( name, s_filer=256, n_filer=64, offset="(0,0,0)", to="(0,0,0)", w
# ConvSoftMax
def to_ConvSoftMax( name, s_filer=40, offset="(0,0,0)", to="(0,0,0)", width=1, height=40, depth=40, caption=" " ):
return r"""
\pic[shift={"""+ offset +"""}] at """+ to +"""
\pic[shift={"""+ offset +"""}] at """+ to +"""
{Box={
name=""" + name +""",
caption="""+ caption +""",
@@ -150,7 +150,7 @@ def to_ConvSoftMax( name, s_filer=40, offset="(0,0,0)", to="(0,0,0)", width=1, h
# SoftMax
def to_SoftMax( name, s_filer=10, offset="(0,0,0)", to="(0,0,0)", width=1.5, height=3, depth=25, opacity=0.8, caption=" " ):
return r"""
\pic[shift={"""+ offset +"""}] at """+ to +"""
\pic[shift={"""+ offset +"""}] at """+ to +"""
{Box={
name=""" + name +""",
caption="""+ caption +""",
@@ -167,7 +167,7 @@ def to_SoftMax( name, s_filer=10, offset="(0,0,0)", to="(0,0,0)", width=1.5, hei

def to_Sum( name, offset="(0,0,0)", to="(0,0,0)", radius=2.5, opacity=0.6):
return r"""
\pic[shift={"""+ offset +"""}] at """+ to +"""
\pic[shift={"""+ offset +"""}] at """+ to +"""
{Ball={
name=""" + name +""",
fill=\SumColor,
@@ -188,7 +188,7 @@ def to_skip( of, to, pos=1.25):
return r"""
\path ("""+ of +"""-southeast) -- ("""+ of +"""-northeast) coordinate[pos="""+ str(pos) +"""] ("""+ of +"""-top) ;
\path ("""+ to +"""-south) -- ("""+ to +"""-north) coordinate[pos="""+ str(pos) +"""] ("""+ to +"""-top) ;
\draw [copyconnection] ("""+of+"""-northeast)
\draw [copyconnection] ("""+of+"""-northeast)
-- node {\copymidarrow}("""+of+"""-top)
-- node {\copymidarrow}("""+to+"""-top)
-- node {\copymidarrow} ("""+to+"""-north);
@@ -202,10 +202,7 @@ def to_end():


def to_generate( arch, pathname="file.tex" ):
with open(pathname, "w") as f:
with open(pathname, "w") as f:
for c in arch:
print(c)
f.write( c )



Binary file removed pycore/tikzeng.pyc
Binary file not shown.
70 changes: 70 additions & 0 deletions pycore/torchparse.py
@@ -0,0 +1,70 @@
from torchinfo import summary

import pycore.tikzeng as pnn


class TorchArchParser:

text_mapping = {
"Linear": "\\mathrm{{FC}}",
"ReLU": "\\varphi_\\mathrm{{ReLU}}"
}

def __init__(self, torch_module, input_size):

self.torch_module = torch_module
self.summary_list = summary(self.torch_module, input_size=input_size).summary_list

self.arch = self.parse(self.summary_list)

def get_arch(self):

return self.arch

@staticmethod
def parse(summary_list):

arch = list()
arch.append(pnn.to_head(".."))
arch.append(pnn.to_cor())
arch.append(pnn.to_begin())
for idx, layer in enumerate(summary_list[2:], start=1):

if layer.class_name == "Linear":

Reviewer comment:

If more module types are parsed in the future, helper functions or a builder class keyed on `layer.class_name` would avoid cluttering the `parse()` function with further `if` conditions.

Author reply:

I agree, and this becomes even more important as more PyTorch module types (layers, activations) are supported. For now, though, I have oriented myself on the coding style in `blocks.py` and `tikzeng.py`.

Further, I propose abstracting the parsing and TikZ-code construction into a collection of layers that map to a similar TikZ representation, plus a general activation representation covering all PyTorch-implemented activation functions.

That said, all of this could, and in my opinion should, be built as an improvement and extension on top of this basic functionality.
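
As a purely hypothetical sketch of that suggestion (not part of this PR), a dispatch table keyed on `layer.class_name` could replace the growing `if` chain in `parse()`:

```python
# Hypothetical refactor sketch (not part of this PR): one small builder per
# supported module type, dispatched via layer.class_name inside parse().

def _build_linear(idx, layer):
    # emits the same TikZ box the Linear branch below produces
    return pnn.to_Conv(
        name=f"module{idx}",
        s_filer="",
        n_filer=layer.module.out_features,
        offset=str((1, 0, 0)),
        width=1,
        height=layer.module.out_features,
        depth=1,
        fill_color="\\FcColor",
        caption=f"${TorchArchParser.text_mapping['Linear']}$",
        to=f"(module{idx-1}-east)" if idx > 1 else str((0, 0, 0)),
    )


BUILDERS = {"Linear": _build_linear}  # extend with "ReLU", "Conv2d", ...

# inside parse():
#     builder = BUILDERS.get(layer.class_name)
#     if builder is not None:
#         arch.append(builder(idx, layer))
```

Each new module type would then only add an entry to the table rather than another branch.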

text = TorchArchParser.text_mapping.get(layer.class_name, "\\mathrm{{FC}}")
arch_layer = pnn.to_Conv(
name=f"module{idx}",
s_filer="",
n_filer=layer.module.out_features,
offset=str((1, 0, 0)),
width=1,
height=layer.module.out_features,
depth=1,
fill_color="\\FcColor",
caption=f"${text}$",
to=f"(module{idx-1}-east)" if idx > 1 else str((0, 0, 0)),
)
arch.append(arch_layer)

if idx > 1:
arch_layer = pnn.to_connection(f"module{idx-1}", f"module{idx}")
arch.append(arch_layer)

if layer.class_name in {"ReLU"}:
text = TorchArchParser.text_mapping.get(layer.class_name, "\\varphi")
arch_layer = pnn.to_Conv(
name=f"module{idx}",
s_filer="",
n_filer="",
offset=str((0.5, 0, 0)),
width=0.5,
height=layer.input_size[1],
depth=layer.input_size[0],
caption=f"${text}$",
to=f"(module{idx-1}-east)" if idx > 1 else str((0, 0, 0)),
)
arch.append(arch_layer)

arch.append(pnn.to_end())

return arch
4 changes: 2 additions & 2 deletions pyexamples/test_simple.py
@@ -11,10 +11,10 @@
to_Conv("conv1", 512, 64, offset="(0,0,0)", to="(0,0,0)", height=64, depth=64, width=2 ),
to_Pool("pool1", offset="(0,0,0)", to="(conv1-east)"),
to_Conv("conv2", 128, 64, offset="(1,0,0)", to="(pool1-east)", height=32, depth=32, width=2 ),
to_connection( "pool1", "conv2"),
to_connection( "pool1", "conv2"),
to_Pool("pool2", offset="(0,0,0)", to="(conv2-east)", height=28, depth=28, width=1),
to_SoftMax("soft1", 10 ,"(3,0,0)", "(pool1-east)", caption="SOFT" ),
to_connection("pool2", "soft1"),
to_connection("pool2", "soft1"),
to_Sum("sum1", offset="(1.5,0,0)", to="(soft1-east)", radius=2.5, opacity=0.6),
to_connection("soft1", "sum1"),
to_end()
45 changes: 45 additions & 0 deletions pyexamples/test_torch_mlp.py
@@ -0,0 +1,45 @@
import sys
sys.path.append('../')

import torch as th

from pycore.torchparse import TorchArchParser
from pycore.tikzeng import to_generate


DEVICE = th.device('cuda' if th.cuda.is_available() else 'cpu')


class MLP(th.nn.Module):

def __init__(self):

super(MLP, self).__init__()

self.net = th.nn.Sequential(
th.nn.Linear(2, 16),
th.nn.ReLU(),
th.nn.Linear(16, 16),
th.nn.ReLU(),
th.nn.Linear(16, 1)
)

def forward(self, x):

x = x.view(-1, 2)
y_hat = self.net(x)

return y_hat.view(-1, 1)


def main():

mlp = MLP()
parser = TorchArchParser(torch_module=mlp, input_size=(1, 2))
arch = parser.get_arch()
to_generate(arch, pathname="./test_torch_mlp.tex")


if __name__ == '__main__':

main()
2 changes: 2 additions & 0 deletions requirements.txt
@@ -0,0 +1,2 @@
## The following requirements were added by pip freeze:
torchinfo[pytorch]==1.6.2
3 changes: 1 addition & 2 deletions tikzmake.sh
@@ -1,7 +1,6 @@
#!/bin/bash


python $1.py
python $1.py
pdflatex $1.tex

rm *.aux *.log *.vscodeLog