57 changes: 57 additions & 0 deletions docs/nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/ONNX_MODEL.md
@@ -0,0 +1,57 @@

[//]: # (Custom component imports)

import DocString from '@site/src/components/DocString';
import PythonCode from '@site/src/components/PythonCode';
import AppDisplay from '@site/src/components/AppDisplay';
import SectionBreak from '@site/src/components/SectionBreak';
import AppendixSection from '@site/src/components/AppendixSection';

[//]: # (Docstring)

import DocstringSource from '!!raw-loader!./a1-[autogen]/docstring.txt';
import PythonSource from '!!raw-loader!./a1-[autogen]/python_code.txt';

<DocString>{DocstringSource}</DocString>
<PythonCode GLink='AI_ML/LOAD_MODEL/ONNX_MODEL/ONNX_MODEL.py'>{PythonSource}</PythonCode>

<SectionBreak />



[//]: # (Examples)

## Examples

import Example1 from './examples/EX1/example.md';
import App1 from '!!raw-loader!./examples/EX1/app.json';



<AppDisplay
nodeLabel='ONNX_MODEL'
appImg={''}
outputImg={''}
>
{App1}
</AppDisplay>

<Example1 />

<SectionBreak />



[//]: # (Appendix)

import Notes from './appendix/notes.md';
import Hardware from './appendix/hardware.md';
import Media from './appendix/media.md';

## Appendix

<AppendixSection index={0} folderPath='nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/appendix/'><Notes /></AppendixSection>
<AppendixSection index={1} folderPath='nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/appendix/'><Hardware /></AppendixSection>
<AppendixSection index={2} folderPath='nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/appendix/'><Media /></AppendixSection>


41 changes: 41 additions & 0 deletions docs/nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/a1-[autogen]/docstring.txt
@@ -0,0 +1,41 @@
ONNX_MODEL loads a serialized ONNX model and makes predictions with it using ONNX Runtime.

This makes it possible to support a wide range of deep learning frameworks and hardware platforms.

Notes
-----

ONNX is an open format for representing deep learning models.
ONNX defines a common set of operators - the building blocks of machine learning
and deep learning models - and a common file format, enabling AI developers
to use models with a variety of frameworks, tools, runtimes, and compilers.

See: https://onnx.ai/
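To make the "common set of operators" concrete, here is a minimal sketch (not part of this PR) that assembles a one-operator graph with the `onnx` helper API and serializes it to the ONNX file format; the file name `tiny_relu.onnx` is illustrative:

import onnx
from onnx import TensorProto, helper

# A single Relu operator: the kind of building block ONNX standardizes.
node = helper.make_node("Relu", inputs=["x"], outputs=["y"])
graph = helper.make_graph(
    nodes=[node],
    name="tiny_relu",
    inputs=[helper.make_tensor_value_info("x", TensorProto.FLOAT, [1, 4])],
    outputs=[helper.make_tensor_value_info("y", TensorProto.FLOAT, [1, 4])],
)
model = helper.make_model(graph)
onnx.checker.check_model(model)  # the same validation the node below performs
onnx.save(model, "tiny_relu.onnx")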

ONNX Runtime is a high-performance inference engine for machine
learning models in the ONNX format. It has been shown to considerably improve
inference performance for a broad range of ML models and hardware platforms.

See: https://onnxruntime.ai/docs/
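A minimal sketch of the ONNX Runtime inference API that this node wraps, reusing the illustrative `tiny_relu.onnx` file from the sketch above:

import numpy as np
import onnxruntime as rt

sess = rt.InferenceSession("tiny_relu.onnx", providers=["CPUExecutionProvider"])
input_name = sess.get_inputs()[0].name
output_name = sess.get_outputs()[0].name

x = np.array([[-1.0, 0.0, 2.0, -3.0]], dtype=np.float32)
(y,) = sess.run([output_name], {input_name: x})
print(y)  # [[0. 0. 2. 0.]]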

Moreover, the ONNX Model Zoo is a collection of pre-trained models for common
machine learning tasks. The models are stored in ONNX format and are ready to use
in different inference scenarios.

See: https://github.com/onnx/models

Parameters
----------
file_path : str
    Path to an ONNX model to load and use for prediction.

default : Vector
    The input tensor to use for prediction.
    For now, only a single input tensor is supported.
    Note that the input tensor shape is not checked against the model's input shape.

Returns
-------
Vector:
    The predictions made by the ONNX model.
    For now, only a single output tensor is supported.
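Since the node skips that shape check, a caller can perform it manually; a sketch (again assuming the illustrative `tiny_relu.onnx` file from above):

import numpy as np
import onnxruntime as rt

sess = rt.InferenceSession("tiny_relu.onnx", providers=["CPUExecutionProvider"])
expected = sess.get_inputs()[0].shape  # e.g. [1, 4]; None or a str means a dynamic dimension
x = np.zeros((1, 4), dtype=np.float32)
for dim, actual in zip(expected, x.shape):
    if isinstance(dim, int) and dim != actual:
        raise ValueError(f"input shape {x.shape} does not match model shape {expected}")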
62 changes: 62 additions & 0 deletions docs/nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/a1-[autogen]/python_code.txt
@@ -0,0 +1,62 @@
from flojoy import flojoy, run_in_venv, Vector
from flojoy.utils import FLOJOY_CACHE_DIR


@flojoy
@run_in_venv(
    pip_dependencies=[
        "onnxruntime",
        "numpy",
        "onnx",
    ]
)
def ONNX_MODEL(
    file_path: str,
    default: Vector,
) -> Vector:
    import os
    import urllib.request

    import numpy as np
    import onnx
    import onnxruntime as rt

    model_name = os.path.basename(file_path)

    if file_path.startswith("http://") or file_path.startswith("https://"):
        # Download the ONNX model from a URL into FLOJOY_CACHE_DIR.
        onnx_model_zoo_cache = os.path.join(
            FLOJOY_CACHE_DIR, "cache", "onnx", "model_zoo"
        )

        os.makedirs(onnx_model_zoo_cache, exist_ok=True)

        filename = os.path.join(onnx_model_zoo_cache, model_name)

        urllib.request.urlretrieve(
            url=file_path,
            filename=filename,
        )

        # Use the downloaded file from now on.
        file_path = filename

    # Pre-load the serialized model to validate that it is well-formed.
    model = onnx.load(file_path)
    onnx.checker.check_model(model)

    # Use ONNX Runtime to make predictions with the model.
    sess = rt.InferenceSession(file_path, providers=["CPUExecutionProvider"])

    # TODO(jjerphan): Assuming a single input and a single output for now.
    input_name = sess.get_inputs()[0].name
    label_name = sess.get_outputs()[0].name

    # TODO(jjerphan): For now, NumPy is assumed to be the main backend for Flojoy.
    # We might adapt this in the future so that other tensor libraries can be
    # used as backends for applications built on deep learning libraries.
    input_tensor = np.asarray(default.v, dtype=np.float32)
    predictions = sess.run([label_name], {input_name: input_tensor})[0]

    return Vector(v=predictions)
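For completeness, one way to produce a file to pass as `file_path` is to export a trained model to ONNX. A sketch using `skl2onnx` (a separate package, not among this node's dependencies; the model and file name are illustrative):

from skl2onnx import convert_sklearn
from skl2onnx.common.data_types import FloatTensorType
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

X, y = load_iris(return_X_y=True)
clf = LogisticRegression(max_iter=1000).fit(X, y)

# Declare a float32 input with a dynamic batch dimension and 4 features.
onx = convert_sklearn(clf, initial_types=[("input", FloatTensorType([None, 4]))])
with open("logreg_iris.onnx", "wb") as f:
    f.write(onx.SerializeToString())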
1 change: 1 addition & 0 deletions docs/nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/appendix/hardware.md
@@ -0,0 +1 @@
This node does not require any peripheral hardware to operate. Please see INSTRUMENTS for nodes that interact with the physical world through connected hardware.
1 change: 1 addition & 0 deletions docs/nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/appendix/media.md
@@ -0,0 +1 @@
No supporting screenshots, photos, or videos have been added to the media.md file for this node.
1 change: 1 addition & 0 deletions docs/nodes/AI_ML/LOAD_MODEL/ONNX_MODEL/appendix/notes.md
@@ -0,0 +1 @@
No theory or technical notes have been contributed for this node yet.
@@ -4,19 +4,16 @@ The BURST_MODE_33510B node is used to turn the Burst mode on or off.
The burst mode is a way to have signals come in "bursts" that are triggered
externally or with a timer, for instance.

-If the "VISA_address" parameter is not specified the VISA_index will be
-used to find the address. The LIST_VISA node can be used to show the
-indicies of all available VISA instruments.
+Requires a CONNECTION_33510B node at the start of the app to connect with
+the instrument. The VISA address will then be listed under 'connection'.

This node should also work with compatible Keysight 33XXX waveform
generators (although they are untested).

Parameters
----------
-VISA_address: str
-    The VISA address to query.
-VISA_index: int
-    The address will be found from LIST_VISA node list with this index.
+connection: VisaConnection
+    The VISA address (requires the CONNECTION_33510B node).
on_off: str
    Turn the burst mode on or off.
channel: str
@@ -1,22 +1,10 @@
-from flojoy import flojoy, DataContainer, TextBlob
-import pyvisa
+from flojoy import flojoy, DataContainer, TextBlob, VisaConnection
from typing import Optional, Literal
-from qcodes.instrument_drivers.Keysight import Keysight33512B
-from usb.core import USBError


-@flojoy(
-    deps={
-        "pyvisa": "1.13.0",
-        "pyusb": "1.2.1",
-        "zeroconf": "0.102.0",
-        "pyvisa_py": "0.7.0",
-        "qcodes": "0.39.1",
-    }
-)
+@flojoy(inject_connection=True)
def BURST_MODE_33510B(
-    VISA_address: Optional[str],
-    VISA_index: Optional[int] = 0,
+    connection: VisaConnection,
    on_off: Literal["ON", "OFF"] = "OFF",
    channel: Literal["ch1", "ch2"] = "ch1",
    trigger_source: Literal["EXT", "IMM", "TIM"] = "TIM",
@@ -28,25 +16,10 @@ def BURST_MODE_33510B(
    burst_phase: float = 0,
    burst_polarity: Literal["NORM", "INV"] = "NORM",
    default: Optional[DataContainer] = None,
-) -> Optional[DataContainer]:
+) -> TextBlob:


-    rm = pyvisa.ResourceManager("@py")
-    if VISA_address == "":
-        VISA_addresses = rm.list_resources()
-        VISA_address = VISA_addresses[int(VISA_index)]
-
-    try:
-        ks = Keysight33512B(
-            "ks",
-            VISA_address,
-            visalib="@py",
-            device_clear=False,
-        )
-    except USBError as err:
-        raise Exception(
-            "USB port error. Trying unplugging+replugging the port."
-        ) from err
+    ks = connection.get_handle()

    channel_str = channel
    channel = getattr(ks, channel)
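For reference, a minimal sketch of the new connection pattern introduced by this diff. It requires a live instrument already registered by a CONNECTION_33510B node, and it assumes the qcodes channel parameters `trigger_source` and `burst_state` (consistent with the node body, but not confirmed by this excerpt):

from flojoy import VisaConnection

def enable_burst(connection: VisaConnection, channel: str = "ch1") -> None:
    # Retrieve the qcodes Keysight33512B handle opened by CONNECTION_33510B.
    ks = connection.get_handle()
    ch = getattr(ks, channel)   # "ch1" or "ch2"
    ch.trigger_source("TIM")    # assumed qcodes parameter: "EXT", "IMM", or "TIM"
    ch.burst_state("ON")        # assumed qcodes parameter: turn burst mode on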