Browse Source

Initial work for AIDA-X module

Signed-off-by: falkTX <falktx@falktx.com>
tags/23.07
falkTX 2 years ago
parent
commit
871c6dbaf4
Signed by: falkTX <falktx@falktx.com> GPG Key ID: CDBAA37ABC74FBA0
15 changed files with 1922 additions and 0 deletions
  1. +3
    -0
      .gitmodules
  2. +137
    -0
      plugins/Cardinal/orig/AIDA-X.svg
  3. +10
    -0
      plugins/Cardinal/plugin.json
  4. +184
    -0
      plugins/Cardinal/res/AIDA-X.svg
  5. +8
    -0
      plugins/Cardinal/res/aida-x-knob.svg
  6. +35
    -0
      plugins/Cardinal/res/aida-x-scale.svg
  7. +605
    -0
      plugins/Cardinal/src/AIDA-X.cpp
  8. +165
    -0
      plugins/Cardinal/src/AIDA-X/Biquad.cpp
  9. +60
    -0
      plugins/Cardinal/src/AIDA-X/Biquad.h
  10. +1
    -0
      plugins/Cardinal/src/AIDA-X/RTNeural
  11. +683
    -0
      plugins/Cardinal/src/AIDA-X/model_variant.hpp
  12. +1
    -0
      plugins/Cardinal/src/plugin.hpp
  13. +28
    -0
      plugins/Makefile
  14. +1
    -0
      plugins/plugins-mini.cpp
  15. +1
    -0
      plugins/plugins.cpp

+ 3
- 0
.gitmodules View File

@@ -227,3 +227,6 @@
[submodule "plugins/Sapphire"]
path = plugins/Sapphire
url = https://github.com/cosinekitty/sapphire.git
[submodule "plugins/Cardinal/src/AIDA-X/RTNeural"]
path = plugins/Cardinal/src/AIDA-X/RTNeural
url = https://github.com/jatinchowdhury18/RTNeural.git

+ 137
- 0
plugins/Cardinal/orig/AIDA-X.svg View File

@@ -0,0 +1,137 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->

<svg
width="116.84mm"
height="128.5mm"
viewBox="0 0 116.84 128.5"
version="1.1"
id="svg4620"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:dc="http://purl.org/dc/elements/1.1/">
<defs
id="defs4614">
<style
id="style6"
type="text/css">
.str0 {stroke:#565656;stroke-width:0.0966867}
.str1 {stroke:#4F4F4F;stroke-width:0.193345}
.fil0 {fill:none}
.fil2 {fill:#2B2A29}
.fil1 {fill:#6B6B6B}
</style>
<clipPath
clipPathUnits="userSpaceOnUse"
id="clipPath847">
<use
x="0"
y="0"
xlink:href="#g843"
id="use849"
width="100%"
height="100%" />
</clipPath>
</defs>
<metadata
id="metadata4617">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
</cc:Work>
</rdf:RDF>
</metadata>
<g
id="layer1"
transform="translate(0,-168.5)">
<g
id="g845"
clip-path="url(#clipPath847)"
transform="translate(35.668149)">
<g
id="g843">
<g
transform="matrix(6.342689,0,0,6.342689,5.4472535,175.6457)"
id="g5299"
style="fill-rule:evenodd">
<g
id="Layer_x0020_1"
transform="translate(-1.6191379e-5,-0.08553947)">
<metadata
id="CorelCorpID_0Corel-Layer" />
<circle
class="fil0 str0"
cx="0.525226"
cy="0.61075097"
r="0.15916"
id="circle10"
style="fill:none;stroke:#ffffff;stroke-width:0.0966867;stroke-opacity:1" />
<circle
class="fil0 str1"
cx="0.525226"
cy="0.61075097"
r="0.42853901"
id="circle12"
style="fill:none;stroke:#ffffff;stroke-width:0.193345;stroke-opacity:1" />
</g>
</g>
<g
transform="translate(-0.09449404,0.14174107)"
id="text5303"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:10.5833px;line-height:1.25;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
aria-label="Cardinal">
<path
id="path5305"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 17.923601,176.99103 v 0.66043 q -0.31626,-0.29456 -0.675928,-0.44028 -0.356567,-0.14573 -0.759643,-0.14573 -0.79375,0 -1.21543,0.48679 -0.42168,0.48369 -0.42168,1.40146 0,0.91468 0.42168,1.40147 0.42168,0.48369 1.21543,0.48369 0.403076,0 0.759643,-0.14573 0.359668,-0.14572 0.675928,-0.44028 v 0.65422 q -0.328662,0.22325 -0.697632,0.33487 -0.365869,0.11162 -0.775146,0.11162 -1.051099,0 -1.655713,-0.64182 -0.604615,-0.64492 -0.604615,-1.75804 0,-1.11621 0.604615,-1.75803 0.604614,-0.64492 1.655713,-0.64492 0.415478,0 0.781347,0.11162 0.36897,0.10852 0.691431,0.32866 z" />
<path
id="path5307"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 20.444377,179.51801 q -0.69143,0 -0.958081,0.15813 -0.26665,0.15813 -0.26665,0.5395 0,0.30386 0.198437,0.48369 0.201538,0.17674 0.545703,0.17674 0.47439,0 0.759644,-0.33487 0.288355,-0.33796 0.288355,-0.89606 v -0.12713 z m 1.137915,-0.23564 v 1.98127 h -0.570507 v -0.5271 q -0.195337,0.31626 -0.486792,0.46819 -0.291456,0.14883 -0.713135,0.14883 -0.533301,0 -0.849561,-0.29766 -0.313159,-0.30075 -0.313159,-0.80305 0,-0.58601 0.390674,-0.88367 0.393774,-0.29765 1.172021,-0.29765 h 0.799952 v -0.0558 q 0,-0.39378 -0.26045,-0.60772 -0.257348,-0.21704 -0.725537,-0.21704 -0.297656,0 -0.579809,0.0713 -0.282154,0.0713 -0.542603,0.21394 v -0.52709 q 0.313159,-0.12093 0.607715,-0.17984 0.294556,-0.062 0.573608,-0.062 0.753443,0 1.125513,0.39067 0.37207,0.39068 0.37207,1.18443 z" />
<path
id="path5309"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 24.769695,178.32429 q -0.09612,-0.0558 -0.21084,-0.0806 -0.111621,-0.0279 -0.248047,-0.0279 -0.483691,0 -0.744141,0.31626 -0.257348,0.31315 -0.257348,0.90227 v 1.82934 H 22.73571 v -3.47265 h 0.573609 v 0.5395 q 0.179834,-0.31626 0.468188,-0.46819 0.288355,-0.15503 0.700733,-0.15503 0.05891,0 0.130224,0.009 0.07131,0.006 0.15813,0.0217 z" />
<path
id="path5311"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 27.541618,178.31808 v -1.87895 h 0.570508 v 4.82451 h -0.570508 v -0.5209 q -0.179834,0.31006 -0.455786,0.46199 -0.272851,0.14883 -0.657324,0.14883 -0.629419,0 -1.026294,-0.5023 -0.393774,-0.50229 -0.393774,-1.32085 0,-0.81855 0.393774,-1.32085 0.396875,-0.50229 1.026294,-0.50229 0.384473,0 0.657324,0.15193 0.275952,0.14883 0.455786,0.45888 z m -1.944067,1.21233 q 0,0.62942 0.257349,0.98909 0.260449,0.35657 0.713134,0.35657 0.452686,0 0.713135,-0.35657 0.260449,-0.35967 0.260449,-0.98909 0,-0.62942 -0.260449,-0.98598 -0.260449,-0.35967 -0.713135,-0.35967 -0.452685,0 -0.713134,0.35967 -0.257349,0.35656 -0.257349,0.98598 z" />
<path
id="path5313"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 29.287248,177.79099 h 0.570508 v 3.47265 h -0.570508 z m 0,-1.35186 h 0.570508 v 0.72244 h -0.570508 z" />
<path
id="path5315"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 33.938127,179.16765 v 2.09599 h -0.570508 v -2.07739 q 0,-0.49299 -0.192236,-0.73794 -0.192236,-0.24495 -0.576709,-0.24495 -0.461987,0 -0.728638,0.29456 -0.26665,0.29455 -0.26665,0.80305 v 1.96267 h -0.573608 v -3.47265 h 0.573608 v 0.5395 q 0.204639,-0.31316 0.480591,-0.46819 0.279053,-0.15503 0.641821,-0.15503 0.598413,0 0.905371,0.37207 0.306958,0.36897 0.306958,1.08831 z" />
<path
id="path5317"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 36.654241,179.51801 q -0.691431,0 -0.958081,0.15813 -0.26665,0.15813 -0.26665,0.5395 0,0.30386 0.198437,0.48369 0.201538,0.17674 0.545703,0.17674 0.47439,0 0.759644,-0.33487 0.288354,-0.33796 0.288354,-0.89606 v -0.12713 z m 1.137915,-0.23564 v 1.98127 h -0.570508 v -0.5271 q -0.195337,0.31626 -0.486792,0.46819 -0.291455,0.14883 -0.713135,0.14883 -0.5333,0 -0.84956,-0.29766 -0.313159,-0.30075 -0.313159,-0.80305 0,-0.58601 0.390674,-0.88367 0.393774,-0.29765 1.172021,-0.29765 h 0.799951 v -0.0558 q 0,-0.39378 -0.260449,-0.60772 -0.257349,-0.21704 -0.725537,-0.21704 -0.297656,0 -0.57981,0.0713 -0.282153,0.0713 -0.542602,0.21394 v -0.52709 q 0.313159,-0.12093 0.607715,-0.17984 0.294555,-0.062 0.573608,-0.062 0.753442,0 1.125513,0.39067 0.37207,0.39068 0.37207,1.18443 z" />
<path
id="path5319"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 38.967278,176.43913 h 0.570508 v 4.82451 h -0.570508 z" />
</g>
</g>
</g>
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:10.5833px;line-height:1.25;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#808a8a;fill-opacity:1;stroke:none;stroke-width:0.264583"
x="36.506084"
y="188.65312"
id="text845"><tspan
id="tspan843"
x="36.506084"
y="188.65312"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:4.93889px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#808a8a;fill-opacity:1;stroke-width:0.264583">Aida DSP : AIDA-X</tspan></text>
</g>
</svg>

+ 10
- 0
plugins/Cardinal/plugin.json View File

@@ -117,6 +117,16 @@
"Visual"
]
},
{
"slug": "AIDA-X",
"name": "AIDA-X",
"description": "Amp Model Player leveraging AI",
"manualUrl": "https://github.com/DISTRHO/Cardinal/blob/main/docs/CARDINAL-MODULES.md#aidax",
"tags": [
"Distortion",
"Effect"
]
},
{
"slug": "Blank",
"name": "Blank",


+ 184
- 0
plugins/Cardinal/res/AIDA-X.svg View File

@@ -0,0 +1,184 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->

<svg
width="116.84mm"
height="128.5mm"
viewBox="0 0 116.84 128.5"
version="1.1"
id="svg4620"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:dc="http://purl.org/dc/elements/1.1/">
<defs
id="defs4614">
<style
id="style6"
type="text/css">
.str0 {stroke:#565656;stroke-width:0.0966867}
.str1 {stroke:#4F4F4F;stroke-width:0.193345}
.fil0 {fill:none}
.fil2 {fill:#2B2A29}
.fil1 {fill:#6B6B6B}
</style>
<clipPath
clipPathUnits="userSpaceOnUse"
id="clipPath847">
<use
x="0"
y="0"
xlink:href="#g843"
id="use849"
width="100%"
height="100%" />
</clipPath>
</defs>
<metadata
id="metadata4617">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
</cc:Work>
</rdf:RDF>
</metadata>
<g
id="layer1"
transform="translate(0,-168.5)">
<g
id="g845"
clip-path="url(#clipPath847)"
transform="translate(35.668149)">
<g
id="g843">
<g
transform="matrix(6.342689,0,0,6.342689,5.4472535,175.6457)"
id="g5299"
style="fill-rule:evenodd">
<g
id="Layer_x0020_1"
transform="translate(-1.6191379e-5,-0.08553947)">
<metadata
id="CorelCorpID_0Corel-Layer" />
<path
id="circle10"
style="fill:none;stroke:#ffffff;stroke-width:0.0966867"
class="fil0 str0"
d="m 0.684386,0.61075097 a 0.15916,0.15916 0 0 1 -0.15916,0.15916001 0.15916,0.15916 0 0 1 -0.15916001,-0.15916001 0.15916,0.15916 0 0 1 0.15916001,-0.15916 0.15916,0.15916 0 0 1 0.15916,0.15916 z" />
<path
id="circle12"
style="fill:none;stroke:#ffffff;stroke-width:0.193345"
class="fil0 str1"
d="M 0.953765,0.61075097 A 0.42853901,0.42853901 0 0 1 0.525226,1.03929 0.42853901,0.42853901 0 0 1 0.09668699,0.61075097 0.42853901,0.42853901 0 0 1 0.525226,0.18221197 a 0.42853901,0.42853901 0 0 1 0.428539,0.428539 z" />
</g>
</g>
<g
transform="translate(-0.09449404,0.14174107)"
id="text5303"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:10.5833px;line-height:1.25;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
aria-label="Cardinal">
<path
id="path5305"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 17.923601,176.99103 v 0.66043 q -0.31626,-0.29456 -0.675928,-0.44028 -0.356567,-0.14573 -0.759643,-0.14573 -0.79375,0 -1.21543,0.48679 -0.42168,0.48369 -0.42168,1.40146 0,0.91468 0.42168,1.40147 0.42168,0.48369 1.21543,0.48369 0.403076,0 0.759643,-0.14573 0.359668,-0.14572 0.675928,-0.44028 v 0.65422 q -0.328662,0.22325 -0.697632,0.33487 -0.365869,0.11162 -0.775146,0.11162 -1.051099,0 -1.655713,-0.64182 -0.604615,-0.64492 -0.604615,-1.75804 0,-1.11621 0.604615,-1.75803 0.604614,-0.64492 1.655713,-0.64492 0.415478,0 0.781347,0.11162 0.36897,0.10852 0.691431,0.32866 z" />
<path
id="path5307"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 20.444377,179.51801 q -0.69143,0 -0.958081,0.15813 -0.26665,0.15813 -0.26665,0.5395 0,0.30386 0.198437,0.48369 0.201538,0.17674 0.545703,0.17674 0.47439,0 0.759644,-0.33487 0.288355,-0.33796 0.288355,-0.89606 v -0.12713 z m 1.137915,-0.23564 v 1.98127 h -0.570507 v -0.5271 q -0.195337,0.31626 -0.486792,0.46819 -0.291456,0.14883 -0.713135,0.14883 -0.533301,0 -0.849561,-0.29766 -0.313159,-0.30075 -0.313159,-0.80305 0,-0.58601 0.390674,-0.88367 0.393774,-0.29765 1.172021,-0.29765 h 0.799952 v -0.0558 q 0,-0.39378 -0.26045,-0.60772 -0.257348,-0.21704 -0.725537,-0.21704 -0.297656,0 -0.579809,0.0713 -0.282154,0.0713 -0.542603,0.21394 v -0.52709 q 0.313159,-0.12093 0.607715,-0.17984 0.294556,-0.062 0.573608,-0.062 0.753443,0 1.125513,0.39067 0.37207,0.39068 0.37207,1.18443 z" />
<path
id="path5309"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 24.769695,178.32429 q -0.09612,-0.0558 -0.21084,-0.0806 -0.111621,-0.0279 -0.248047,-0.0279 -0.483691,0 -0.744141,0.31626 -0.257348,0.31315 -0.257348,0.90227 v 1.82934 H 22.73571 v -3.47265 h 0.573609 v 0.5395 q 0.179834,-0.31626 0.468188,-0.46819 0.288355,-0.15503 0.700733,-0.15503 0.05891,0 0.130224,0.009 0.07131,0.006 0.15813,0.0217 z" />
<path
id="path5311"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 27.541618,178.31808 v -1.87895 h 0.570508 v 4.82451 h -0.570508 v -0.5209 q -0.179834,0.31006 -0.455786,0.46199 -0.272851,0.14883 -0.657324,0.14883 -0.629419,0 -1.026294,-0.5023 -0.393774,-0.50229 -0.393774,-1.32085 0,-0.81855 0.393774,-1.32085 0.396875,-0.50229 1.026294,-0.50229 0.384473,0 0.657324,0.15193 0.275952,0.14883 0.455786,0.45888 z m -1.944067,1.21233 q 0,0.62942 0.257349,0.98909 0.260449,0.35657 0.713134,0.35657 0.452686,0 0.713135,-0.35657 0.260449,-0.35967 0.260449,-0.98909 0,-0.62942 -0.260449,-0.98598 -0.260449,-0.35967 -0.713135,-0.35967 -0.452685,0 -0.713134,0.35967 -0.257349,0.35656 -0.257349,0.98598 z" />
<path
id="path5313"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 29.287248,177.79099 h 0.570508 v 3.47265 h -0.570508 z m 0,-1.35186 h 0.570508 v 0.72244 h -0.570508 z" />
<path
id="path5315"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 33.938127,179.16765 v 2.09599 h -0.570508 v -2.07739 q 0,-0.49299 -0.192236,-0.73794 -0.192236,-0.24495 -0.576709,-0.24495 -0.461987,0 -0.728638,0.29456 -0.26665,0.29455 -0.26665,0.80305 v 1.96267 h -0.573608 v -3.47265 h 0.573608 v 0.5395 q 0.204639,-0.31316 0.480591,-0.46819 0.279053,-0.15503 0.641821,-0.15503 0.598413,0 0.905371,0.37207 0.306958,0.36897 0.306958,1.08831 z" />
<path
id="path5317"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 36.654241,179.51801 q -0.691431,0 -0.958081,0.15813 -0.26665,0.15813 -0.26665,0.5395 0,0.30386 0.198437,0.48369 0.201538,0.17674 0.545703,0.17674 0.47439,0 0.759644,-0.33487 0.288354,-0.33796 0.288354,-0.89606 v -0.12713 z m 1.137915,-0.23564 v 1.98127 h -0.570508 v -0.5271 q -0.195337,0.31626 -0.486792,0.46819 -0.291455,0.14883 -0.713135,0.14883 -0.5333,0 -0.84956,-0.29766 -0.313159,-0.30075 -0.313159,-0.80305 0,-0.58601 0.390674,-0.88367 0.393774,-0.29765 1.172021,-0.29765 h 0.799951 v -0.0558 q 0,-0.39378 -0.260449,-0.60772 -0.257349,-0.21704 -0.725537,-0.21704 -0.297656,0 -0.57981,0.0713 -0.282153,0.0713 -0.542602,0.21394 v -0.52709 q 0.313159,-0.12093 0.607715,-0.17984 0.294555,-0.062 0.573608,-0.062 0.753442,0 1.125513,0.39067 0.37207,0.39068 0.37207,1.18443 z" />
<path
id="path5319"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.35px;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.264583"
d="m 38.967278,176.43913 h 0.570508 v 4.82451 h -0.570508 z" />
</g>
</g>
</g>
<g
aria-label="Aida DSP : AIDA-X"
id="text845"
style="font-size:10.5833px;line-height:1.25;font-family:'DejaVu Sans';-inkscape-font-specification:'DejaVu Sans, Normal';letter-spacing:0px;word-spacing:0px;fill:#808a8a;stroke-width:0.264583">
<path
d="m 38.194182,185.53255 -0.66077,1.7918 h 1.323951 z m -0.274919,-0.4799 h 0.552249 l 1.372182,3.60047 h -0.506429 l -0.327974,-0.92363 h -1.622984 l -0.327974,0.92363 H 36.54467 Z"
style="font-size:4.93889px"
id="path6473" />
<path
d="m 40.350123,185.95217 h 0.443728 v 2.70095 h -0.443728 z m 0,-1.05145 h 0.443728 v 0.5619 h -0.443728 z"
style="font-size:4.93889px"
id="path6475" />
<path
d="m 43.49963,186.36213 v -1.46141 h 0.443728 v 3.7524 H 43.49963 v -0.40514 q -0.139871,0.24116 -0.354501,0.35932 -0.212217,0.11576 -0.511252,0.11576 -0.489548,0 -0.798229,-0.39068 -0.306269,-0.39067 -0.306269,-1.02732 0,-0.63666 0.306269,-1.02733 0.308681,-0.39068 0.798229,-0.39068 0.299035,0 0.511252,0.11817 0.21463,0.11576 0.354501,0.35691 z m -1.512053,0.94293 q 0,0.48954 0.20016,0.76929 0.202572,0.27733 0.554661,0.27733 0.352089,0 0.55466,-0.27733 0.202572,-0.27975 0.202572,-0.76929 0,-0.48955 -0.202572,-0.76688 -0.202571,-0.27974 -0.55466,-0.27974 -0.352089,0 -0.554661,0.27974 -0.20016,0.27733 -0.20016,0.76688 z"
style="font-size:4.93889px"
id="path6477" />
<path
d="m 46.08483,187.29541 q -0.53778,0 -0.745174,0.12299 -0.207395,0.12299 -0.207395,0.41961 0,0.23634 0.15434,0.37621 0.156752,0.13746 0.424436,0.13746 0.36897,0 0.590834,-0.26045 0.224276,-0.26286 0.224276,-0.69695 v -0.0989 z m 0.885045,-0.18328 v 1.54099 h -0.443728 v -0.40996 q -0.151929,0.24598 -0.378616,0.36414 -0.226688,0.11576 -0.554661,0.11576 -0.414789,0 -0.660769,-0.23151 -0.243569,-0.23393 -0.243569,-0.6246 0,-0.45579 0.303858,-0.6873 0.306269,-0.23151 0.911572,-0.23151 h 0.622185 v -0.0434 q 0,-0.30627 -0.202572,-0.47267 -0.20016,-0.16881 -0.564307,-0.16881 -0.23151,0 -0.450963,0.0555 -0.219452,0.0555 -0.422024,0.1664 v -0.40996 q 0.243568,-0.0941 0.472667,-0.13987 0.229099,-0.0482 0.44614,-0.0482 0.586011,0 0.875399,0.30386 0.289388,0.30386 0.289388,0.92122 z"
style="font-size:4.93889px"
id="path6479" />
<path
d="m 49.960219,185.45297 v 2.79983 h 0.588423 q 0.745174,0 1.090028,-0.33762 0.347266,-0.33762 0.347266,-1.06591 0,-0.72347 -0.347266,-1.05868 -0.344854,-0.33762 -1.090028,-0.33762 z m -0.487136,-0.40032 h 1.0008 q 1.04662,0 1.536168,0.4365 0.489549,0.43408 0.489549,1.36012 0,0.93086 -0.49196,1.36736 -0.49196,0.43649 -1.533757,0.43649 h -1.0008 z"
style="font-size:4.93889px"
id="path6481" />
<path
d="m 55.434477,185.17082 v 0.47508 q -0.27733,-0.13264 -0.52331,-0.19775 -0.24598,-0.0651 -0.475079,-0.0651 -0.397909,0 -0.61495,0.15434 -0.214629,0.15434 -0.214629,0.4389 0,0.23875 0.142282,0.36174 0.144694,0.12058 0.545014,0.19533 l 0.294212,0.0603 q 0.545014,0.1037 0.803052,0.36656 0.260449,0.26045 0.260449,0.69936 0,0.52331 -0.352089,0.7934 -0.349677,0.2701 -1.027328,0.2701 -0.255626,0 -0.545014,-0.0579 -0.286976,-0.0579 -0.595657,-0.17122 v -0.50161 q 0.296623,0.1664 0.581188,0.25081 0.284565,0.0844 0.559483,0.0844 0.417202,0 0.643889,-0.16399 0.226687,-0.16398 0.226687,-0.46784 0,-0.26527 -0.163986,-0.41479 -0.161575,-0.14952 -0.532957,-0.22428 l -0.296623,-0.0579 q -0.545014,-0.10852 -0.788582,-0.34003 -0.243568,-0.23151 -0.243568,-0.64389 0,-0.47749 0.335207,-0.75241 0.33762,-0.27492 0.928454,-0.27492 0.253214,0 0.516075,0.0458 0.262861,0.0458 0.53778,0.13746 z"
style="font-size:4.93889px"
id="path6483" />
<path
d="m 56.898299,185.45297 v 1.35289 h 0.612538 q 0.340031,0 0.525722,-0.17604 0.185691,-0.17605 0.185691,-0.50161 0,-0.32315 -0.185691,-0.49919 -0.185691,-0.17605 -0.525722,-0.17605 z m -0.487137,-0.40032 h 1.099675 q 0.605304,0 0.913984,0.27492 0.311092,0.27251 0.311092,0.80064 0,0.53296 -0.311092,0.80546 -0.30868,0.27251 -0.913984,0.27251 h -0.612538 v 1.44694 h -0.487137 z"
style="font-size:4.93889px"
id="path6485" />
<path
d="m 61.053429,188.04058 h 0.508841 v 0.61254 h -0.508841 z m 0,-1.94131 h 0.508841 v 0.61254 h -0.508841 z"
style="font-size:4.93889px"
id="path6487" />
<path
d="m 65.396662,185.53255 -0.66077,1.7918 h 1.323951 z m -0.274919,-0.4799 h 0.552249 l 1.372182,3.60047 h -0.506429 l -0.327973,-0.92363 h -1.622985 l -0.327973,0.92363 H 63.74715 Z"
style="font-size:4.93889px"
id="path6489" />
<path
d="m 67.571895,185.05265 h 0.487136 v 3.60047 h -0.487136 z"
style="font-size:4.93889px"
id="path6491" />
<path
d="m 69.51562,185.45297 v 2.79983 h 0.588422 q 0.745175,0 1.090029,-0.33762 0.347266,-0.33762 0.347266,-1.06591 0,-0.72347 -0.347266,-1.05868 -0.344854,-0.33762 -1.090029,-0.33762 z m -0.487137,-0.40032 h 1.000801 q 1.04662,0 1.536168,0.4365 0.489548,0.43408 0.489548,1.36012 0,0.93086 -0.491959,1.36736 -0.49196,0.43649 -1.533757,0.43649 h -1.000801 z"
style="font-size:4.93889px"
id="path6493" />
<path
d="m 73.94808,185.53255 -0.660769,1.7918 h 1.32395 z m -0.274918,-0.4799 h 0.552248 l 1.372182,3.60047 h -0.506429 l -0.327973,-0.92363 h -1.622985 l -0.327973,0.92363 h -0.513664 z"
style="font-size:4.93889px"
id="path6495" />
<path
d="m 75.771224,187.10248 h 1.299835 v 0.3955 h -1.299835 z"
style="font-size:4.93889px"
id="path6497" />
<path
d="m 77.377327,185.05265 h 0.52331 l 0.894691,1.33842 0.899515,-1.33842 h 0.52331 l -1.157552,1.7291 1.234722,1.87137 h -0.52331 l -1.012858,-1.53134 -1.020093,1.53134 H 77.21334 l 1.285366,-1.92202 z"
style="font-size:4.93889px"
id="path6499" />
</g>
</g>
</svg>

+ 8
- 0
plugins/Cardinal/res/aida-x-knob.svg View File

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg width="47px" height="47px" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;">
<g id="knobLDark">
<path id="path3832" d="M23.521,45.109c-7.674,0 -3.302,3.9 -10.224,0.498c-6.922,-3.403 -1.202,-2.341 -5.997,-8.501c-4.795,-6.159 -5.059,-0.201 -6.763,-7.827c-1.704,-7.625 1.043,-2.42 2.76,-10.046c1.718,-7.626 -2.998,-4.102 1.797,-10.221c4.795,-6.12 2.51,-0.673 9.432,-4.035c6.921,-3.363 1.321,-4.977 8.995,-4.977c7.675,0 2.087,1.574 8.996,4.977c6.909,3.402 4.636,-2.045 9.432,4.035c4.795,6.078 0.079,2.689 1.796,10.26c1.717,7.572 4.465,2.422 2.761,10.048c-1.704,7.625 -1.982,1.708 -6.763,7.827c-4.782,6.119 0.924,5.057 -5.998,8.46c-6.921,3.402 -2.549,-0.498 -10.224,-0.498Z" style="fill:rgb(230,229,229);fill-rule:nonzero;"/>
<path d="M23.521,23.5l0,-23.5" style="fill:none;fill-rule:nonzero;stroke:rgb(51,51,51);stroke-width:2.29px;"/>
</g>
</svg>

+ 35
- 0
plugins/Cardinal/res/aida-x-scale.svg View File

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 16.0.3, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="1000px" height="1000px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve">
<image display="none" overflow="visible" width="1100" height="1100" xlink:href="scale.png" transform="matrix(0.9999 0 0 0.9999 -50 -20)">
</image>
<circle display="none" fill="#969696" cx="500" cy="529.932" r="397"/>
<path fill="#969696" d="M504.417,129.643c0,2.913-2.238,5.274-5,5.274l0,0c-2.761,0-5-2.361-5-5.274V64.774
c0-2.913,2.239-5.274,5-5.274l0,0c2.762,0,5,2.361,5,5.274V129.643z"/>
<path fill="#969696" d="M350.896,158.423c1.114,2.69-0.05,5.729-2.602,6.785l0,0c-2.551,1.057-5.523-0.268-6.638-2.958
l-24.824-59.931c-1.114-2.691,0.05-5.729,2.602-6.786l0,0c2.552-1.057,5.522,0.269,6.638,2.959L350.896,158.423z"/>
<path fill="#969696" d="M220.075,243.762c2.06,2.06,2.146,5.312,0.193,7.264l0,0c-1.952,1.953-5.205,1.867-7.265-0.193
l-45.869-45.87c-2.06-2.059-2.146-5.312-0.193-7.265l0,0c1.953-1.953,5.205-1.865,7.265,0.194L220.075,243.762z"/>
<path fill="#969696" d="M131.87,372.668c2.691,1.114,4.016,4.086,2.959,6.637l0,0c-1.057,2.551-4.095,3.716-6.786,2.602
l-59.931-24.825c-2.69-1.114-4.016-4.086-2.959-6.638l0,0c1.058-2.552,4.095-3.715,6.786-2.601L131.87,372.668z"/>
<path fill="#969696" d="M99.71,525.516c2.913,0,5.273,2.238,5.273,5l0,0c0,2.762-2.36,5-5.274,5.001H34.841
c-2.912,0-5.274-2.239-5.274-5.001l0,0c0.001-2.762,2.361-5,5.274-5H99.71z"/>
<path fill="#969696" d="M128.49,679.037c2.691-1.115,5.729,0.049,6.785,2.601l0,0c1.057,2.552-0.268,5.522-2.959,6.639
l-59.931,24.824c-2.69,1.114-5.729-0.051-6.787-2.603l0,0c-1.056-2.552,0.269-5.522,2.96-6.638L128.49,679.037z"/>
<path fill="#969696" d="M213.829,809.857c2.06-2.06,5.312-2.146,7.265-0.193l0,0c1.952,1.953,1.865,5.205-0.193,7.266
l-45.869,45.869c-2.06,2.059-5.313,2.146-7.267,0.192l0,0c-1.952-1.953-1.865-5.204,0.194-7.265L213.829,809.857z"/>
<path fill="#969696" d="M779.925,816.104c-2.06-2.06-2.146-5.313-0.193-7.266l0,0c1.952-1.952,5.206-1.864,7.266,0.195
l45.868,45.869c2.06,2.06,2.146,5.311,0.192,7.266l0,0c-1.952,1.953-5.202,1.864-7.263-0.194L779.925,816.104z"/>
<path fill="#969696" d="M868.129,687.197c-2.69-1.114-4.015-4.087-2.959-6.639l0,0c1.057-2.551,4.097-3.715,6.788-2.6l59.93,24.824
c2.69,1.115,4.015,4.086,2.958,6.64l0,0c-1.057,2.551-4.093,3.713-6.784,2.6L868.129,687.197z"/>
<path fill="#969696" d="M900.289,534.35c-2.912,0-5.273-2.24-5.274-5.001l0,0c0-2.762,2.363-5,5.276-5l64.868,0.001
c2.913,0,5.272,2.238,5.273,5.002l0,0c0,2.761-2.36,4.996-5.272,4.998H900.289z"/>
<path fill="#969696" d="M871.509,380.829c-2.69,1.115-5.729-0.052-6.786-2.602l0,0c-1.058-2.552,0.27-5.524,2.961-6.639
l59.931-24.823c2.691-1.114,5.728,0.051,6.786,2.604l0,0c1.057,2.551-0.269,5.519-2.958,6.635L871.509,380.829z"/>
<path fill="#969696" d="M786.17,250.009c-2.06,2.06-5.313,2.145-7.266,0.193l0,0c-1.953-1.953-1.864-5.207,0.195-7.267
l45.869-45.868c2.061-2.06,5.311-2.145,7.266-0.191l0,0c1.953,1.952,1.864,5.201-0.193,7.262L786.17,250.009z"/>
<path fill="#969696" d="M657.265,161.804c-1.115,2.691-4.089,4.015-6.64,2.959l0,0c-2.551-1.057-3.714-4.097-2.6-6.789l24.824-59.93
c1.115-2.691,4.086-4.014,6.64-2.957l0,0c2.552,1.056,3.713,4.092,2.601,6.783L657.265,161.804z"/>
</svg>

+ 605
- 0
plugins/Cardinal/src/AIDA-X.cpp View File

@@ -0,0 +1,605 @@
/*
* AIDA-X Cardinal plugin
* Copyright (C) 2022-2023 Massimo Pennazio <maxipenna@libero.it>
* Copyright (C) 2023 Filipe Coelho <falktx@falktx.com>
* SPDX-License-Identifier: GPL-3.0-or-later
*/

#include "plugincontext.hpp"
#include "ModuleWidgets.hpp"

#include "extra/Sleep.hpp"

#include "AIDA-X/Biquad.cpp"
#include "AIDA-X/model_variant.hpp"

#ifndef HEADLESS
# include "ImGuiWidget.hpp"
# include "ghc/filesystem.hpp"
#endif

// --------------------------------------------------------------------------------------------------------------------

/* Convert a gain in decibels to a linear coefficient; anything at or below -90 dB is treated as silence. */
static constexpr float DB_CO(const float g)
{
    if (g > -90.f)
        return std::pow(10.f, g * 0.05f);
    return 0.f;
}

/* Linearly re-map x from the range [in_min, in_max] to [out_min, out_max] (no clamping). */
static constexpr float MAP(const float x, const float in_min, const float in_max, const float out_min, const float out_max)
{
    const float scaled = (x - in_min) * (out_max - out_min);
    const float span = in_max - in_min;
    return scaled / span + out_min;
}

/* Defines for tone controls */
static constexpr const float COMMON_Q = 0.707f; // Butterworth Q, shared by all Biquad instances below

/* Defines for antialiasing filter */
// Bounds for the pre-model lowpass coefficient; Fc is normalized to the samplerate.
static constexpr const float INLPF_MAX_CO = 0.99f * 0.5f; /* coeff * ((samplerate / 2) / samplerate) */
static constexpr const float INLPF_MIN_CO = 0.25f * 0.5f; /* coeff * ((samplerate / 2) / samplerate) */

// --------------------------------------------------------------------------------------------------------------------

// One loaded neural model plus the gain/skip metadata read from its json file.
struct DynamicModel {
    ModelVariantType variant;   // the actual RTNeural network, one of the known GRU/LSTM layouts
    bool input_skip; /* Means the model has been trained with first input element skipped to the output */
    float input_gain;           // linear gain applied to the signal before the network
    float output_gain;          // linear gain applied after the network
};

// --------------------------------------------------------------------------------------------------------------------
// This function carries model calculations

// Run the loaded model in-place over a buffer of samples: input gain, network
// forward pass, then output gain, as configured by the model's json metadata.
static inline
void applyModel(DynamicModel* model, float* const out, uint32_t numSamples)
{
    const bool input_skip = model->input_skip;
    const float input_gain = model->input_gain;
    const float output_gain = model->output_gain;

    std::visit(
        [&out, numSamples, input_skip, input_gain, output_gain] (auto&& custom_model)
        {
            using ModelType = std::decay_t<decltype (custom_model)>;

            // pre-scale the whole buffer, skipped when the gain is exactly unity
            if (d_isNotEqual(input_gain, 1.f))
            {
                for (uint32_t i=0; i<numSamples; ++i)
                    out[i] *= input_gain;
            }

            // only single-input networks are processed; loadModelFromStream rejects in_shape > 1
            if constexpr (ModelType::input_size == 1)
            {
                if (input_skip)
                {
                    // "skip" models learn the difference signal: add the network
                    // output on top of the (already gain-scaled) input
                    for (uint32_t i=0; i<numSamples; ++i)
                        out[i] += custom_model.forward(out + i);
                }
                else
                {
                    for (uint32_t i=0; i<numSamples; ++i)
                        out[i] = custom_model.forward(out + i) * output_gain;
                }
            }

            // for skip models the output gain must scale input + network output,
            // so it is applied in a separate pass after the forward loop
            if (input_skip && d_isNotEqual(output_gain, 1.f))
            {
                for (uint32_t i=0; i<numSamples; ++i)
                    out[i] *= output_gain;
            }
        },
        model->variant
    );
}

// Run the loaded model over a single sample; used by the realtime process() callback.
// Mirrors the buffered applyModel() above: input gain, forward pass, output gain.
static inline
float applyModel(DynamicModel* model, float sample)
{
    const bool input_skip = model->input_skip;
    const float input_gain = model->input_gain;
    const float output_gain = model->output_gain;

    sample *= input_gain;

    std::visit(
        [&sample, input_skip, output_gain] (auto&& custom_model)
        {
            using ModelType = std::decay_t<decltype (custom_model)>;
            float* out = &sample; // forward() takes a pointer to the input frame

            // only single-input networks are processed; others leave the sample as-is
            if constexpr (ModelType::input_size == 1)
            {
                if (input_skip)
                {
                    // skip models add the network output on top of the input
                    sample += custom_model.forward(out);
                    sample *= output_gain;
                }
                else
                {
                    sample = custom_model.forward(out) * output_gain;
                }
            }
        },
        model->variant
    );
    return sample;
}

// --------------------------------------------------------------------------------------------------------------------

struct AidaPluginModule : Module {
enum ParamIds {
PARAM_INPUT_LEVEL,
PARAM_OUTPUT_LEVEL,
NUM_PARAMS
};
enum InputIds {
AUDIO_INPUT,
NUM_INPUTS
};
enum OutputIds {
AUDIO_OUTPUT,
NUM_OUTPUTS
};
enum LightIds {
NUM_LIGHTS
};

enum Parameters {
kParameterCount
};

CardinalPluginContext* const pcontext;
bool fileChanged = false;
std::string currentFile;

Biquad dc_blocker { bq_type_highpass, 0.5f, COMMON_Q, 0.0f };
Biquad in_lpf { bq_type_lowpass, 0.5f, COMMON_Q, 0.0f };
dsp::ExponentialFilter inlevel;
dsp::ExponentialFilter outlevel;
DynamicModel* model = nullptr;
std::atomic<bool> activeModel { false };

AidaPluginModule()
: pcontext(static_cast<CardinalPluginContext*>(APP))
{
config(NUM_PARAMS, NUM_INPUTS, NUM_OUTPUTS, NUM_LIGHTS);

configInput(AUDIO_INPUT, "Audio");
configOutput(AUDIO_OUTPUT, "Audio");
configParam(PARAM_INPUT_LEVEL, -12.f, 12.f, 0.f, "Input level", " dB");
configParam(PARAM_OUTPUT_LEVEL, -12.f, 12.f, 0.f, "Output level", " dB");

inlevel.setTau(1 / 30.f);
outlevel.setTau(1 / 30.f);
}

~AidaPluginModule() override
{
delete model;
}

json_t* dataToJson() override
{
json_t* const rootJ = json_object();
DISTRHO_SAFE_ASSERT_RETURN(rootJ != nullptr, nullptr);

json_object_set_new(rootJ, "filepath", json_string(currentFile.c_str()));

return rootJ;
}

void dataFromJson(json_t* const rootJ) override
{
fileChanged = false;

if (json_t* const filepathJ = json_object_get(rootJ, "filepath"))
{
const char* const filepath = json_string_value(filepathJ);

if (filepath[0] != '\0')
{
currentFile = filepath;
fileChanged = true;

loadModelFromFile(filepath);
}
}

if (! fileChanged)
{
currentFile.clear();
fileChanged = true;
}
}

void loadModelFromFile(const char* const filename)
{
try {
std::ifstream jsonStream(filename, std::ifstream::binary);
loadModelFromStream(jsonStream);
}
catch (const std::exception& e) {
d_stderr2("Unable to load json file: %s\nError: %s", filename, e.what());
};
}

void loadModelFromStream(std::istream& jsonStream)
{
int input_size;
int input_skip;
float input_gain;
float output_gain;
nlohmann::json model_json;

try {
jsonStream >> model_json;

/* Understand which model type to load */
input_size = model_json["in_shape"].back().get<int>();
if (input_size > 1) { // MAX_INPUT_SIZE
throw std::invalid_argument("Value for input_size not supported");
}

if (model_json["in_skip"].is_number()) {
input_skip = model_json["in_skip"].get<int>();
if (input_skip > 1)
throw std::invalid_argument("Values for in_skip > 1 are not supported");
}
else {
input_skip = 0;
}

if (model_json["in_gain"].is_number()) {
input_gain = DB_CO(model_json["in_gain"].get<float>());
}
else {
input_gain = 1.0f;
}

if (model_json["out_gain"].is_number()) {
output_gain = DB_CO(model_json["out_gain"].get<float>());
}
else {
output_gain = 1.0f;
}
}
catch (const std::exception& e) {
d_stderr2("Unable to load json, error: %s", e.what());
return;
}

std::unique_ptr<DynamicModel> newmodel = std::make_unique<DynamicModel>();

try {
if (! custom_model_creator (model_json, newmodel->variant))
throw std::runtime_error ("Unable to identify a known model architecture!");

std::visit (
[&model_json] (auto&& custom_model)
{
using ModelType = std::decay_t<decltype (custom_model)>;
if constexpr (! std::is_same_v<ModelType, NullModel>)
{
custom_model.parseJson (model_json, true);
custom_model.reset();
}
},
newmodel->variant);
}
catch (const std::exception& e) {
d_stderr2("Error loading model: %s", e.what());
return;
}

// save extra info
newmodel->input_skip = input_skip != 0;
newmodel->input_gain = input_gain;
newmodel->output_gain = output_gain;

// Pre-buffer to avoid "clicks" during initialization
float out[2048] = {};
applyModel(newmodel.get(), out, ARRAY_SIZE(out));

// swap active model
DynamicModel* const oldmodel = model;
model = newmodel.release();

// if processing, wait for process cycle to complete
while (oldmodel != nullptr && activeModel.load())
d_msleep(1);

delete oldmodel;
}

void process(const ProcessArgs& args) override
{
const float stime = args.sampleTime;
const float inlevelv = DB_CO(params[PARAM_INPUT_LEVEL].getValue());
const float outlevelv = DB_CO(params[PARAM_OUTPUT_LEVEL].getValue());

// High frequencies roll-off (lowpass)
float sample = in_lpf.process(inputs[AUDIO_INPUT].getVoltage() * 0.1f) * inlevel.process(stime, inlevelv);

// run model
if (model != nullptr)
{
activeModel.store(true);
sample = applyModel(model, sample);
activeModel.store(false);
}

// DC blocker filter (highpass)
outputs[AUDIO_OUTPUT].setVoltage(dc_blocker.process(sample) * outlevel.process(stime, outlevelv) * 10.f);
}

void onSampleRateChange(const SampleRateChangeEvent& e) override
{
dc_blocker.setFc(35.0f / e.sampleRate);

in_lpf.setFc(MAP(66.216f, 0.0f, 100.0f, INLPF_MAX_CO, INLPF_MIN_CO));
}

DISTRHO_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(AidaPluginModule)
};

// --------------------------------------------------------------------------------------------------------------------

#ifndef HEADLESS
// ImGui list of the .json model files found next to the currently loaded model.
// Clicking an entry loads that model into the module immediately.
struct AidaModelListWidget : ImGuiWidget {
    AidaPluginModule* const module;

    /*
    bool showError = false;
    String errorMessage;
    */

    // Full path plus basename pair; sorted by basename for display.
    struct ghcFile {
        std::string full, base;
        bool operator<(const ghcFile& other) const noexcept { return base < other.base; }
    };
    std::string currentDirectory;
    std::vector<ghcFile> currentFiles;
    size_t selectedFile = (size_t)-1; // index into currentFiles; (size_t)-1 means no selection

    AidaModelListWidget(AidaPluginModule* const m)
        : ImGuiWidget(),
          module(m)
    {
        if (module->fileChanged)
            reloadDir();
    }

    // Render the full-widget file list window.
    void drawImGui() override
    {
        const float scaleFactor = getScaleFactor();

        // NOTE(review): 'flags' is only referenced by the commented-out error
        // popup below; it is currently unused.
        const int flags = ImGuiWindowFlags_NoSavedSettings
                        | ImGuiWindowFlags_NoTitleBar
                        | ImGuiWindowFlags_NoResize
                        | ImGuiWindowFlags_NoCollapse
                        | ImGuiWindowFlags_NoScrollbar
                        | ImGuiWindowFlags_NoScrollWithMouse;

        ImGui::SetNextWindowPos(ImVec2(0, 0));
        ImGui::SetNextWindowSize(ImVec2(box.size.x * scaleFactor, box.size.y * scaleFactor));

        if (ImGui::Begin("Model File List", nullptr, ImGuiWindowFlags_NoTitleBar|ImGuiWindowFlags_NoResize))
        {
            /*
            if (showError)
            {
                showError = false;
                ImGui::OpenPopup("Audio File Error");
            }

            if (ImGui::BeginPopupModal("Model File Error", nullptr, flags))
            {
                ImGui::TextWrapped("Failed to load model file, error was:\n%s", errorMessage.buffer());

                ImGui::Separator();

                if (ImGui::Button("Ok"))
                    ImGui::CloseCurrentPopup();

                ImGui::EndPopup();
            }
            else
            */
            if (ImGui::BeginTable("modellist", 1, ImGuiTableFlags_NoSavedSettings))
            {
                for (size_t i=0, count=currentFiles.size(); i < count; ++i)
                {
                    bool wasSelected = selectedFile == i;
                    bool selected = wasSelected;
                    ImGui::TableNextRow();
                    ImGui::TableSetColumnIndex(0);
                    ImGui::Selectable(currentFiles[i].base.c_str(), &selected);

                    // load the model only on a fresh selection, not on every redraw
                    if (selected && ! wasSelected)
                    {
                        selectedFile = i;
                        module->currentFile = currentFiles[i].full;
                        module->loadModelFromFile(currentFiles[i].full.c_str());
                    }
                }

                ImGui::EndTable();
            }
        }

        ImGui::End();
    }

    // Poll the module's fileChanged flag once per UI frame.
    void step() override
    {
        if (module->fileChanged)
            reloadDir();

        ImGuiWidget::step();
    }

    // Rescan the directory of the current model for .json files and restore the
    // selection to the current model if it is still present.
    void reloadDir()
    {
        module->fileChanged = false;

        currentFiles.clear();
        selectedFile = (size_t)-1;

        static constexpr const char* const supportedExtensions[] = {
            ".json"
        };

        using namespace ghc::filesystem;
        const path currentFile = u8path(module->currentFile);
        currentDirectory = currentFile.parent_path().generic_u8string();

        directory_iterator it;

        try {
            it = directory_iterator(u8path(currentDirectory));
        } DISTRHO_SAFE_EXCEPTION_RETURN("Failed to open current directory",);

        // collect regular files with a supported extension
        for (directory_iterator itb = begin(it), ite=end(it); itb != ite; ++itb)
        {
            if (! itb->is_regular_file())
                continue;
            const path filepath = itb->path();
            const path extension = filepath.extension();
            for (size_t i=0; i<ARRAY_SIZE(supportedExtensions); ++i)
            {
                if (extension.compare(supportedExtensions[i]) == 0)
                {
                    currentFiles.push_back({ filepath.generic_u8string(), filepath.filename().generic_u8string() });
                    break;
                }
            }
        }

        // sort by basename (see ghcFile::operator<)
        std::sort(currentFiles.begin(), currentFiles.end());

        for (size_t index = 0; index < currentFiles.size(); ++index)
        {
            if (currentFiles[index].full.compare(currentFile) == 0)
            {
                selectedFile = index;
                break;
            }
        }
    }
};

// Knob widget using the custom AIDA-X svg artwork, with +-0.76*pi of travel.
struct AidaKnob : app::SvgKnob {
    AidaKnob()
    {
        minAngle = -0.76 * M_PI;
        maxAngle = 0.76 * M_PI;
        shadow->opacity = 0; // hide the default knob shadow
        setSvg(APP->window->loadSvg(asset::plugin(pluginInstance, "res/aida-x-knob.svg")));
    }
};

// 23HP panel: audio jacks on top, the model file list in the middle and the
// two level knobs at the bottom.
struct AidaWidget : ModuleWidgetWithSideScrews<23> {
    // layout constants, in pixels of the 380px-tall panel
    static constexpr const float previewBoxHeight = 80.0f;
    static constexpr const float previewBoxBottom = 20.0f;
    static constexpr const float previewBoxRect[] = {8.0f,
                                                     380.0f - previewBoxHeight - previewBoxBottom,
                                                     15.0f * 23 - 16.0f,
                                                     previewBoxHeight};
    static constexpr const float startY_list = startY - 2.0f;
    static constexpr const float fileListHeight = 380.0f - startY_list - previewBoxHeight - previewBoxBottom * 1.5f;
    static constexpr const float startY_preview = startY_list + fileListHeight;

    AidaPluginModule* const module;

    AidaWidget(AidaPluginModule* const m)
        : module(m)
    {
        setModule(module);
        setPanel(APP->window->loadSvg(asset::plugin(pluginInstance, "res/AIDA-X.svg")));

        createAndAddScrews();

        // port index 0 == AUDIO_INPUT / AUDIO_OUTPUT
        addInput(createInput<PJ301MPort>(Vec(startX_In, 25), module, 0));
        addOutput(createOutput<PJ301MPort>(Vec(startX_Out, 25), module, 0));

        addChild(createParamCentered<AidaKnob>(Vec(box.size.x * 0.5f - 50, box.size.y - 60),
                                               module, AidaPluginModule::PARAM_INPUT_LEVEL));

        addChild(createParamCentered<AidaKnob>(Vec(box.size.x * 0.5f + 50, box.size.y - 60),
                                               module, AidaPluginModule::PARAM_OUTPUT_LEVEL));

        // module is null in the browser preview; only add the list for real instances
        if (m != nullptr)
        {
            AidaModelListWidget* const listw = new AidaModelListWidget(m);
            listw->box.pos = Vec(0, startY_list);
            listw->box.size = Vec(box.size.x, fileListHeight);
            addChild(listw);
        }
    }

    void draw(const DrawArgs& args) override
    {
        drawBackground(args.vg);
        drawOutputJacksArea(args.vg);

        ModuleWidget::draw(args);
    }

    // Light rounded rectangle behind the output jack.
    void drawOutputJacksArea(NVGcontext* const vg)
    {
        nvgBeginPath(vg);
        nvgRoundedRect(vg, startX_Out - 2.5f, startY_list * 0.5f - padding * 0.5f, padding, padding, 4);
        nvgFillColor(vg, nvgRGB(0xd0, 0xd0, 0xd0));
        nvgFill(vg);
    }

    // Adds a "Load model file..." entry that opens the host file browser.
    void appendContextMenu(ui::Menu* const menu) override
    {
        menu->addChild(new ui::MenuSeparator);

        struct LoadModelFileItem : MenuItem {
            AidaPluginModule* const module;

            LoadModelFileItem(AidaPluginModule* const m)
                : module(m)
            {
                text = "Load model file...";
            }

            void onAction(const event::Action&) override
            {
                AidaPluginModule* const module = this->module;
                // async callback owns `path` and must free it
                async_dialog_filebrowser(false, nullptr, nullptr, text.c_str(), [module](char* path)
                {
                    if (path == nullptr)
                        return;

                    module->currentFile = path;
                    module->fileChanged = true;
                    module->loadModelFromFile(path);
                    std::free(path);
                });
            }
        };

        menu->addChild(new LoadModelFileItem(module));
    }

    DISTRHO_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(AidaWidget)
};
#else
// Minimal fallback widget for headless builds: just the audio ports, no panel art.
struct AidaWidget : ModuleWidget {
    AidaWidget(AidaPluginModule* const module) {
        setModule(module);

        addInput(createInput<PJ301MPort>({}, module, 0));
        addOutput(createOutput<PJ301MPort>({}, module, 0));
    }
};
#endif

// --------------------------------------------------------------------------------------------------------------------

Model* modelAidaX = createModel<AidaPluginModule, AidaWidget>("AIDA-X");

// --------------------------------------------------------------------------------------------------------------------

+ 165
- 0
plugins/Cardinal/src/AIDA-X/Biquad.cpp View File

@@ -0,0 +1,165 @@
//
// Biquad.cpp
//
// Created by Nigel Redmon on 11/24/12
// EarLevel Engineering: earlevel.com
// Copyright 2012 Nigel Redmon
//
// For a complete explanation of the Biquad code:
// http://www.earlevel.com/main/2012/11/26/biquad-c-source-code/
//
// License:
//
// This source code is provided as is, without warranty.
// You may copy and distribute verbatim copies of this document.
// You may modify and use this source code to create binary code
// for your own purposes, free or commercial.
//

#include <math.h>
#include "Biquad.h"

// Default state: lowpass parameters (Fc = 0.5, Butterworth Q, 0 dB) but with
// pass-through coefficients (a0 = 1, rest 0) until calcBiquad() is triggered
// by one of the setters.
Biquad::Biquad() {
    type = bq_type_lowpass;
    Fc = 0.50;
    Q = 0.707;
    peakGain = 0.0;
    a0 = 1.0;
    a1 = a2 = b1 = b2 = 0.0;
    z1 = z2 = 0.0;
}

// Construct and immediately compute coefficients for the given configuration.
Biquad::Biquad(int type, double Fc, double Q, double peakGainDB) {
    setBiquad(type, Fc, Q, peakGainDB);
    z1 = z2 = 0.0; // clear the delay line
}

// No owned resources; nothing to release.
Biquad::~Biquad() {
}

// Change the response type (bq_type_*) and recompute coefficients.
void Biquad::setType(int type) {
    this->type = type;
    calcBiquad();
}

// Change the quality factor and recompute coefficients.
void Biquad::setQ(double Q) {
    this->Q = Q;
    calcBiquad();
}

// Change the normalized cutoff (cutoff / samplerate) and recompute coefficients.
void Biquad::setFc(double Fc) {
    this->Fc = Fc;
    calcBiquad();
}

// Change the peak/shelf gain (dB) and recompute coefficients.
void Biquad::setPeakGain(double peakGainDB) {
    this->peakGain = peakGainDB;
    calcBiquad();
}

// Set all parameters at once; calcBiquad() runs only once, via setPeakGain().
void Biquad::setBiquad(int type, double Fc, double Q, double peakGainDB) {
    this->type = type;
    this->Q = Q;
    this->Fc = Fc;
    setPeakGain(peakGainDB);
}

// Recompute a0..b2 from type/Fc/Q/peakGain via the bilinear transform
// (EarLevel Engineering biquad formulas). Fc is normalized to the samplerate.
// NOTE(review): an unknown type falls through the switch and leaves the
// previous coefficients untouched.
void Biquad::calcBiquad(void) {
    double norm;                               // shared normalization factor
    double V = pow(10, fabs(peakGain) / 20.0); // linear gain for peak/shelf types
    double K = tan(M_PI * Fc);                 // pre-warped frequency variable
    switch (this->type) {
        case bq_type_lowpass:
            norm = 1 / (1 + K / Q + K * K);
            a0 = K * K * norm;
            a1 = 2 * a0;
            a2 = a0;
            b1 = 2 * (K * K - 1) * norm;
            b2 = (1 - K / Q + K * K) * norm;
            break;

        case bq_type_highpass:
            norm = 1 / (1 + K / Q + K * K);
            a0 = 1 * norm;
            a1 = -2 * a0;
            a2 = a0;
            b1 = 2 * (K * K - 1) * norm;
            b2 = (1 - K / Q + K * K) * norm;
            break;

        case bq_type_bandpass:
            norm = 1 / (1 + K / Q + K * K);
            a0 = K / Q * norm;
            a1 = 0;
            a2 = -a0;
            b1 = 2 * (K * K - 1) * norm;
            b2 = (1 - K / Q + K * K) * norm;
            break;

        case bq_type_notch:
            norm = 1 / (1 + K / Q + K * K);
            a0 = (1 + K * K) * norm;
            a1 = 2 * (K * K - 1) * norm;
            a2 = a0;
            b1 = a1;
            b2 = (1 - K / Q + K * K) * norm;
            break;

        case bq_type_peak:
            // peak and shelf types use different formulas for boost vs cut
            if (peakGain >= 0) { // boost
                norm = 1 / (1 + 1/Q * K + K * K);
                a0 = (1 + V/Q * K + K * K) * norm;
                a1 = 2 * (K * K - 1) * norm;
                a2 = (1 - V/Q * K + K * K) * norm;
                b1 = a1;
                b2 = (1 - 1/Q * K + K * K) * norm;
            }
            else { // cut
                norm = 1 / (1 + V/Q * K + K * K);
                a0 = (1 + 1/Q * K + K * K) * norm;
                a1 = 2 * (K * K - 1) * norm;
                a2 = (1 - 1/Q * K + K * K) * norm;
                b1 = a1;
                b2 = (1 - V/Q * K + K * K) * norm;
            }
            break;
        case bq_type_lowshelf:
            if (peakGain >= 0) { // boost
                norm = 1 / (1 + sqrt(2) * K + K * K);
                a0 = (1 + sqrt(2*V) * K + V * K * K) * norm;
                a1 = 2 * (V * K * K - 1) * norm;
                a2 = (1 - sqrt(2*V) * K + V * K * K) * norm;
                b1 = 2 * (K * K - 1) * norm;
                b2 = (1 - sqrt(2) * K + K * K) * norm;
            }
            else { // cut
                norm = 1 / (1 + sqrt(2*V) * K + V * K * K);
                a0 = (1 + sqrt(2) * K + K * K) * norm;
                a1 = 2 * (K * K - 1) * norm;
                a2 = (1 - sqrt(2) * K + K * K) * norm;
                b1 = 2 * (V * K * K - 1) * norm;
                b2 = (1 - sqrt(2*V) * K + V * K * K) * norm;
            }
            break;
        case bq_type_highshelf:
            if (peakGain >= 0) { // boost
                norm = 1 / (1 + sqrt(2) * K + K * K);
                a0 = (V + sqrt(2*V) * K + K * K) * norm;
                a1 = 2 * (K * K - V) * norm;
                a2 = (V - sqrt(2*V) * K + K * K) * norm;
                b1 = 2 * (K * K - 1) * norm;
                b2 = (1 - sqrt(2) * K + K * K) * norm;
            }
            else { // cut
                norm = 1 / (V + sqrt(2*V) * K + K * K);
                a0 = (1 + sqrt(2) * K + K * K) * norm;
                a1 = 2 * (K * K - 1) * norm;
                a2 = (1 - sqrt(2) * K + K * K) * norm;
                b1 = 2 * (K * K - V) * norm;
                b2 = (V - sqrt(2*V) * K + K * K) * norm;
            }
            break;
    }

    return;
}

+ 60
- 0
plugins/Cardinal/src/AIDA-X/Biquad.h View File

@@ -0,0 +1,60 @@
//
// Biquad.h
//
// Created by Nigel Redmon on 11/24/12
// EarLevel Engineering: earlevel.com
// Copyright 2012 Nigel Redmon
//
// For a complete explanation of the Biquad code:
// http://www.earlevel.com/main/2012/11/26/biquad-c-source-code/
//
// License:
//
// This source code is provided as is, without warranty.
// You may copy and distribute verbatim copies of this document.
// You may modify and use this source code to create binary code
// for your own purposes, free or commercial.
//

#ifndef Biquad_h
#define Biquad_h

// Filter response types accepted by Biquad::setType() / setBiquad().
enum {
    bq_type_lowpass = 0,
    bq_type_highpass,
    bq_type_bandpass,
    bq_type_notch,
    bq_type_peak,      // uses peakGain (dB)
    bq_type_lowshelf,  // shelving types also use peakGain (dB)
    bq_type_highshelf
};

// Single second-order IIR filter section; coefficient math lives in Biquad.cpp.
class Biquad {
public:
    Biquad();   // defaults: lowpass params, pass-through coefficients
    Biquad(int type, double Fc, double Q, double peakGainDB);
    ~Biquad();
    void setType(int type);              // one of the bq_type_* constants
    void setQ(double Q);
    void setFc(double Fc);               // normalized frequency: cutoff / samplerate
    void setPeakGain(double peakGainDB); // for peak/shelf types, in dB
    void setBiquad(int type, double Fc, double Q, double peakGainDB);
    float process(float in);             // run one sample through the filter

protected:
    void calcBiquad(void);               // recompute coefficients from the parameters

    int type;
    double a0, a1, a2, b1, b2;           // feed-forward (a*) and feed-back (b*) coefficients
    double Fc, Q, peakGain;
    double z1, z2;                       // filter state
};

// Process one sample (transposed direct form II: two state variables z1/z2).
inline float Biquad::process(float in) {
    double out = in * a0 + z1;
    z1 = in * a1 + z2 - b1 * out;
    z2 = in * a2 - b2 * out;
    return out; // implicit narrowing double -> float
}

#endif // Biquad_h

+ 1
- 0
plugins/Cardinal/src/AIDA-X/RTNeural

@@ -0,0 +1 @@
Subproject commit 74e9d354937346f31858e976a2eefc1c25cdcccd

+ 683
- 0
plugins/Cardinal/src/AIDA-X/model_variant.hpp View File

@@ -0,0 +1,683 @@
#include <variant>
#include <RTNeural/RTNeural.h>

// Maximum network input width covered by the generated model list below.
#define MAX_INPUT_SIZE 3
// Placeholder alternative held by the variant before a real model is loaded.
struct NullModel { static constexpr int input_size = 0; static constexpr int output_size = 0; };
// Generated aliases, named ModelType_<rnn>_<hidden-size>_<input-width>:
// a single GRU or LSTM recurrent layer followed by a dense layer down to 1 output.
using ModelType_GRU_8_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 8>, RTNeural::DenseT<float, 8, 1>>;
using ModelType_GRU_8_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 8>, RTNeural::DenseT<float, 8, 1>>;
using ModelType_GRU_8_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 8>, RTNeural::DenseT<float, 8, 1>>;
using ModelType_GRU_12_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 12>, RTNeural::DenseT<float, 12, 1>>;
using ModelType_GRU_12_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 12>, RTNeural::DenseT<float, 12, 1>>;
using ModelType_GRU_12_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 12>, RTNeural::DenseT<float, 12, 1>>;
using ModelType_GRU_16_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 16>, RTNeural::DenseT<float, 16, 1>>;
using ModelType_GRU_16_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 16>, RTNeural::DenseT<float, 16, 1>>;
using ModelType_GRU_16_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 16>, RTNeural::DenseT<float, 16, 1>>;
using ModelType_GRU_20_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 20>, RTNeural::DenseT<float, 20, 1>>;
using ModelType_GRU_20_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 20>, RTNeural::DenseT<float, 20, 1>>;
using ModelType_GRU_20_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 20>, RTNeural::DenseT<float, 20, 1>>;
using ModelType_GRU_32_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 32>, RTNeural::DenseT<float, 32, 1>>;
using ModelType_GRU_32_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 32>, RTNeural::DenseT<float, 32, 1>>;
using ModelType_GRU_32_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 32>, RTNeural::DenseT<float, 32, 1>>;
using ModelType_GRU_40_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 40>, RTNeural::DenseT<float, 40, 1>>;
using ModelType_GRU_40_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 40>, RTNeural::DenseT<float, 40, 1>>;
using ModelType_GRU_40_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 40>, RTNeural::DenseT<float, 40, 1>>;
using ModelType_GRU_64_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 64>, RTNeural::DenseT<float, 64, 1>>;
using ModelType_GRU_64_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 64>, RTNeural::DenseT<float, 64, 1>>;
using ModelType_GRU_64_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 64>, RTNeural::DenseT<float, 64, 1>>;
using ModelType_LSTM_8_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 8>, RTNeural::DenseT<float, 8, 1>>;
using ModelType_LSTM_8_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 8>, RTNeural::DenseT<float, 8, 1>>;
using ModelType_LSTM_8_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 8>, RTNeural::DenseT<float, 8, 1>>;
using ModelType_LSTM_12_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 12>, RTNeural::DenseT<float, 12, 1>>;
using ModelType_LSTM_12_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 12>, RTNeural::DenseT<float, 12, 1>>;
using ModelType_LSTM_12_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 12>, RTNeural::DenseT<float, 12, 1>>;
using ModelType_LSTM_16_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 16>, RTNeural::DenseT<float, 16, 1>>;
using ModelType_LSTM_16_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 16>, RTNeural::DenseT<float, 16, 1>>;
using ModelType_LSTM_16_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 16>, RTNeural::DenseT<float, 16, 1>>;
using ModelType_LSTM_20_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 20>, RTNeural::DenseT<float, 20, 1>>;
using ModelType_LSTM_20_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 20>, RTNeural::DenseT<float, 20, 1>>;
using ModelType_LSTM_20_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 20>, RTNeural::DenseT<float, 20, 1>>;
using ModelType_LSTM_32_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 32>, RTNeural::DenseT<float, 32, 1>>;
using ModelType_LSTM_32_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 32>, RTNeural::DenseT<float, 32, 1>>;
using ModelType_LSTM_32_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 32>, RTNeural::DenseT<float, 32, 1>>;
using ModelType_LSTM_40_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 40>, RTNeural::DenseT<float, 40, 1>>;
using ModelType_LSTM_40_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 40>, RTNeural::DenseT<float, 40, 1>>;
using ModelType_LSTM_40_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 40>, RTNeural::DenseT<float, 40, 1>>;
using ModelType_LSTM_64_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 64>, RTNeural::DenseT<float, 64, 1>>;
using ModelType_LSTM_64_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 64>, RTNeural::DenseT<float, 64, 1>>;
using ModelType_LSTM_64_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 64>, RTNeural::DenseT<float, 64, 1>>;
// The single variant type that can hold any of the supported architectures (or NullModel).
using ModelVariantType = std::variant<NullModel,ModelType_GRU_8_1,ModelType_GRU_8_2,ModelType_GRU_8_3,ModelType_GRU_12_1,ModelType_GRU_12_2,ModelType_GRU_12_3,ModelType_GRU_16_1,ModelType_GRU_16_2,ModelType_GRU_16_3,ModelType_GRU_20_1,ModelType_GRU_20_2,ModelType_GRU_20_3,ModelType_GRU_32_1,ModelType_GRU_32_2,ModelType_GRU_32_3,ModelType_GRU_40_1,ModelType_GRU_40_2,ModelType_GRU_40_3,ModelType_GRU_64_1,ModelType_GRU_64_2,ModelType_GRU_64_3,ModelType_LSTM_8_1,ModelType_LSTM_8_2,ModelType_LSTM_8_3,ModelType_LSTM_12_1,ModelType_LSTM_12_2,ModelType_LSTM_12_3,ModelType_LSTM_16_1,ModelType_LSTM_16_2,ModelType_LSTM_16_3,ModelType_LSTM_20_1,ModelType_LSTM_20_2,ModelType_LSTM_20_3,ModelType_LSTM_32_1,ModelType_LSTM_32_2,ModelType_LSTM_32_3,ModelType_LSTM_40_1,ModelType_LSTM_40_2,ModelType_LSTM_40_3,ModelType_LSTM_64_1,ModelType_LSTM_64_2,ModelType_LSTM_64_3>;

inline bool is_model_type_ModelType_GRU_8_1 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 8 and the model input width is 1.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 8;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 1;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_8_2 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 8 and the model input width is 2.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 8;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 2;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_8_3 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 8 and the model input width is 3.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 8;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 3;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_12_1 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 12 and the model input width is 1.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 12;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 1;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_12_2 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 12 and the model input width is 2.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 12;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 2;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_12_3 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 12 and the model input width is 3.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 12;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 3;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_16_1 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 16 and the model input width is 1.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 16;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 1;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_16_2 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 16 and the model input width is 2.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 16;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 2;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_16_3 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 16 and the model input width is 3.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 16;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 3;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_20_1 (const nlohmann::json& model_json) {
    // True when layer 0 is a GRU with hidden size 20 and the model input width is 1.
    const auto& layers = model_json.at ("layers");
    const bool layer_ok  = layers.at (0).at ("type").get<std::string>() == "gru";
    const bool hidden_ok = layers.at (0).at ("shape").back().get<int>() == 20;
    const bool input_ok  = model_json.at ("in_shape").back().get<int>() == 1;
    return layer_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_20_2 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 20 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 20;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_20_3 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 20 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 20;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_32_1 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 32 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 32;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_32_2 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 32 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 32;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_32_3 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 32 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 32;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_40_1 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 40 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 40;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_40_2 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 40 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 40;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_40_3 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 40 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 40;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_64_1 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 64 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 64;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_64_2 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 64 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 64;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_GRU_64_3 (const nlohmann::json& model_json) {
    // Matches a single-GRU network with hidden size 64 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "gru";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 64;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_8_1 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 8 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 8;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_8_2 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 8 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 8;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_8_3 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 8 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 8;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_12_1 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 12 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 12;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_12_2 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 12 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 12;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_12_3 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 12 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 12;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_16_1 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 16 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 16;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_16_2 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 16 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 16;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_16_3 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 16 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 16;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_20_1 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 20 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 20;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_20_2 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 20 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 20;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_20_3 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 20 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 20;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_32_1 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 32 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 32;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_32_2 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 32 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 32;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_32_3 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 32 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 32;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_40_1 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 40 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 40;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_40_2 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 40 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 40;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_40_3 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 40 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 40;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_64_1 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 64 and input size 1.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 64;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 1;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_64_2 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 64 and input size 2.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 64;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 2;
    return type_ok && hidden_ok && input_ok;
}

inline bool is_model_type_ModelType_LSTM_64_3 (const nlohmann::json& model_json) {
    // Matches a single-LSTM network with hidden size 64 and input size 3.
    // All three fields are read unconditionally so a malformed JSON throws
    // regardless of which check fails first.
    const auto& first_layer = model_json.at ("layers").at (0);
    const bool type_ok = first_layer.at ("type").get<std::string>() == "lstm";
    const bool hidden_ok = first_layer.at ("shape").back().get<int>() == 64;
    const bool input_ok = model_json.at ("in_shape").back().get<int>() == 3;
    return type_ok && hidden_ok && input_ok;
}

inline bool custom_model_creator (const nlohmann::json& model_json, ModelVariantType& model) {
    // Probe the model JSON against every supported RNN topology, in the same
    // order as the is_model_type_* predicates above, and emplace the first
    // matching statically-sized model into the variant.
    // Returns true on a match; otherwise emplaces NullModel and returns false.
    // NOTE: macro-expanded dispatch — each line below is one "if match,
    // emplace and return true" probe, identical in behavior to an
    // if/else-if ladder since every taken branch returns.
   #define CARDINAL_AIDAX_TRY_MODEL(ModelT)       \
    if (is_model_type_##ModelT (model_json)) {    \
        model.emplace<ModelT>();                  \
        return true;                              \
    }
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_8_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_8_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_8_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_12_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_12_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_12_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_16_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_16_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_16_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_20_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_20_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_20_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_32_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_32_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_32_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_40_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_40_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_40_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_64_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_64_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_GRU_64_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_8_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_8_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_8_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_12_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_12_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_12_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_16_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_16_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_16_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_20_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_20_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_20_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_32_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_32_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_32_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_40_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_40_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_40_3)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_64_1)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_64_2)
    CARDINAL_AIDAX_TRY_MODEL(ModelType_LSTM_64_3)
   #undef CARDINAL_AIDAX_TRY_MODEL
    // No known topology matched: install the do-nothing model.
    model.emplace<NullModel>();
    return false;
}

+ 1
- 0
plugins/Cardinal/src/plugin.hpp View File

@@ -28,6 +28,7 @@ using namespace rack;

extern Plugin* pluginInstance;

extern Model* modelAidaX;
extern Model* modelAudioFile;
extern Model* modelAudioToCVPitch;
extern Model* modelCarla;


+ 28
- 0
plugins/Makefile View File

@@ -223,6 +223,7 @@ MINIPLUGIN_FILES = plugins-mini.cpp
# --------------------------------------------------------------
# Cardinal (built-in)

PLUGIN_FILES += Cardinal/src/AIDA-X.cpp
PLUGIN_FILES += Cardinal/src/Blank.cpp
PLUGIN_FILES += Cardinal/src/ExpanderInputMIDI.cpp
PLUGIN_FILES += Cardinal/src/ExpanderOutputMIDI.cpp
@@ -1306,6 +1307,20 @@ ifeq ($(WASM),true)
CARLA_FLAGS += -DDISTRHO_RUNNER_INDIRECT_WASM_CALLS
endif

# --------------------------------------------------------------
# RTNeural flags, used in AIDA-X

RTNEURAL_FLAGS = -std=gnu++17
RTNEURAL_FLAGS += -DRTNEURAL_DEFAULT_ALIGNMENT=16
RTNEURAL_FLAGS += -DRTNEURAL_USE_EIGEN=1

ifeq ($(WASM),true)
RTNEURAL_FLAGS += -DEIGEN_DONT_VECTORIZE=1
endif

RTNEURAL_FLAGS += -ICardinal/src/AIDA-X/RTNeural
RTNEURAL_FLAGS += -ICardinal/src/AIDA-X/RTNeural/modules/Eigen

# --------------------------------------------------------------
# Build targets

@@ -1975,6 +1990,19 @@ $(BUILD_DIR)/Cardinal/%.cpp.o: Cardinal/%.cpp
-Dstbrp_rect=stbrp_rect_cardinal \
$(CARLA_FLAGS)

$(BUILD_DIR)/Cardinal/src/AIDA-X.cpp.o: Cardinal/src/AIDA-X.cpp
-@mkdir -p "$(shell dirname $(BUILD_DIR)/$<)"
@echo "Compiling $<"
$(SILENT)$(CXX) $< $(BUILD_CXX_FLAGS) -c -o $@ \
-DpluginInstance=pluginInstance__Cardinal \
-Dstbrp_context=stbrp_context_cardinal \
-Dstbrp_coord=stbrp_coord_cardinal \
-Dstbtt_fontinfo=stbtt_fontinfo_cardinal \
-Dstbrp_node=stbrp_node_cardinal \
-Dstbrp_rect=stbrp_rect_cardinal \
$(RTNEURAL_FLAGS) \
$(CARLA_FLAGS)

$(BUILD_DIR)/21kHz/%.cpp.o: 21kHz/%.cpp
-@mkdir -p "$(shell dirname $(BUILD_DIR)/$<)"
@echo "Compiling $<"


+ 1
- 0
plugins/plugins-mini.cpp View File

@@ -212,6 +212,7 @@ static void initStatic__Cardinal()
/*
#endif
*/
spl.removeModule("AIDA-X");
spl.removeModule("AudioFile");
spl.removeModule("Blank");
spl.removeModule("Carla");


+ 1
- 0
plugins/plugins.cpp View File

@@ -972,6 +972,7 @@ static void initStatic__Cardinal()
const StaticPluginLoader spl(p, "Cardinal");
if (spl.ok())
{
p->addModel(modelAidaX);
p->addModel(modelCardinalBlank);
p->addModel(modelExpanderInputMIDI);
p->addModel(modelExpanderOutputMIDI);


Loading…
Cancel
Save