diff --git a/.gitmodules b/.gitmodules
index 6241e86..cd1f6ca 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -227,3 +227,6 @@
[submodule "plugins/Sapphire"]
path = plugins/Sapphire
url = https://github.com/cosinekitty/sapphire.git
+[submodule "plugins/Cardinal/src/AIDA-X/RTNeural"]
+ path = plugins/Cardinal/src/AIDA-X/RTNeural
+ url = https://github.com/jatinchowdhury18/RTNeural.git
diff --git a/plugins/Cardinal/orig/AIDA-X.svg b/plugins/Cardinal/orig/AIDA-X.svg
new file mode 100644
index 0000000..58d5190
--- /dev/null
+++ b/plugins/Cardinal/orig/AIDA-X.svg
@@ -0,0 +1,137 @@
+
+
+
+
diff --git a/plugins/Cardinal/plugin.json b/plugins/Cardinal/plugin.json
index ff96849..4086063 100644
--- a/plugins/Cardinal/plugin.json
+++ b/plugins/Cardinal/plugin.json
@@ -117,6 +117,16 @@
"Visual"
]
},
+ {
+ "slug": "AIDA-X",
+ "name": "AIDA-X",
+ "description": "Amp Model Player leveraging AI",
+ "manualUrl": "https://github.com/DISTRHO/Cardinal/blob/main/docs/CARDINAL-MODULES.md#aidax",
+ "tags": [
+ "Distortion",
+ "Effect"
+ ]
+ },
{
"slug": "Blank",
"name": "Blank",
diff --git a/plugins/Cardinal/res/AIDA-X.svg b/plugins/Cardinal/res/AIDA-X.svg
new file mode 100644
index 0000000..1cc3277
--- /dev/null
+++ b/plugins/Cardinal/res/AIDA-X.svg
@@ -0,0 +1,184 @@
+
+
+
+
diff --git a/plugins/Cardinal/res/aida-x-knob.svg b/plugins/Cardinal/res/aida-x-knob.svg
new file mode 100644
index 0000000..278ac6e
--- /dev/null
+++ b/plugins/Cardinal/res/aida-x-knob.svg
@@ -0,0 +1,8 @@
+
+
+
diff --git a/plugins/Cardinal/res/aida-x-scale.svg b/plugins/Cardinal/res/aida-x-scale.svg
new file mode 100644
index 0000000..020bd1e
--- /dev/null
+++ b/plugins/Cardinal/res/aida-x-scale.svg
@@ -0,0 +1,35 @@
+
+
+
+
diff --git a/plugins/Cardinal/src/AIDA-X.cpp b/plugins/Cardinal/src/AIDA-X.cpp
new file mode 100644
index 0000000..6f62b2e
--- /dev/null
+++ b/plugins/Cardinal/src/AIDA-X.cpp
@@ -0,0 +1,605 @@
+/*
+ * AIDA-X Cardinal plugin
+ * Copyright (C) 2022-2023 Massimo Pennazio
+ * Copyright (C) 2023 Filipe Coelho
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+#include "plugincontext.hpp"
+#include "ModuleWidgets.hpp"
+
+#include "extra/Sleep.hpp"
+
+#include "AIDA-X/Biquad.cpp"
+#include "AIDA-X/model_variant.hpp"
+
+#ifndef HEADLESS
+# include "ImGuiWidget.hpp"
+# include "ghc/filesystem.hpp"
+#endif
+
+// --------------------------------------------------------------------------------------------------------------------
+
+/* Define a constexpr for converting a gain in dB to a coefficient */
+static constexpr float DB_CO(const float g) { return g > -90.f ? std::pow(10.f, g * 0.05f) : 0.f; }
+
+/* Define a macro to re-maps a number from one range to another */
+static constexpr float MAP(const float x, const float in_min, const float in_max, const float out_min, const float out_max)
+{
+ return ((x - in_min) * (out_max - out_min) / (in_max - in_min)) + out_min;
+}
+
+/* Defines for tone controls */
+static constexpr const float COMMON_Q = 0.707f;
+
+/* Defines for antialiasing filter */
+static constexpr const float INLPF_MAX_CO = 0.99f * 0.5f; /* coeff * ((samplerate / 2) / samplerate) */
+static constexpr const float INLPF_MIN_CO = 0.25f * 0.5f; /* coeff * ((samplerate / 2) / samplerate) */
+
+// --------------------------------------------------------------------------------------------------------------------
+
+struct DynamicModel {
+ ModelVariantType variant;
+ bool input_skip; /* Means the model has been trained with first input element skipped to the output */
+ float input_gain;
+ float output_gain;
+};
+
+// --------------------------------------------------------------------------------------------------------------------
+// This function carries model calculations
+
+static inline
+void applyModel(DynamicModel* model, float* const out, uint32_t numSamples)
+{
+ const bool input_skip = model->input_skip;
+ const float input_gain = model->input_gain;
+ const float output_gain = model->output_gain;
+
+ std::visit(
+ [&out, numSamples, input_skip, input_gain, output_gain] (auto&& custom_model)
+ {
+ using ModelType = std::decay_t;
+
+ if (d_isNotEqual(input_gain, 1.f))
+ {
+ for (uint32_t i=0; ivariant
+ );
+}
+
+static inline
+float applyModel(DynamicModel* model, float sample)
+{
+ const bool input_skip = model->input_skip;
+ const float input_gain = model->input_gain;
+ const float output_gain = model->output_gain;
+
+ sample *= input_gain;
+
+ std::visit(
+ [&sample, input_skip, output_gain] (auto&& custom_model)
+ {
+ using ModelType = std::decay_t;
+ float* out = &sample;
+
+ if constexpr (ModelType::input_size == 1)
+ {
+ if (input_skip)
+ {
+ sample += custom_model.forward(out);
+ sample *= output_gain;
+ }
+ else
+ {
+ sample = custom_model.forward(out) * output_gain;
+ }
+ }
+ },
+ model->variant
+ );
+
+ return sample;
+}
+
+// --------------------------------------------------------------------------------------------------------------------
+
+struct AidaPluginModule : Module {
+ enum ParamIds {
+ PARAM_INPUT_LEVEL,
+ PARAM_OUTPUT_LEVEL,
+ NUM_PARAMS
+ };
+ enum InputIds {
+ AUDIO_INPUT,
+ NUM_INPUTS
+ };
+ enum OutputIds {
+ AUDIO_OUTPUT,
+ NUM_OUTPUTS
+ };
+ enum LightIds {
+ NUM_LIGHTS
+ };
+
+ enum Parameters {
+ kParameterCount
+ };
+
+ CardinalPluginContext* const pcontext;
+ bool fileChanged = false;
+ std::string currentFile;
+
+ Biquad dc_blocker { bq_type_highpass, 0.5f, COMMON_Q, 0.0f };
+ Biquad in_lpf { bq_type_lowpass, 0.5f, COMMON_Q, 0.0f };
+ dsp::ExponentialFilter inlevel;
+ dsp::ExponentialFilter outlevel;
+ DynamicModel* model = nullptr;
+ std::atomic activeModel { false };
+
+ AidaPluginModule()
+ : pcontext(static_cast(APP))
+ {
+ config(NUM_PARAMS, NUM_INPUTS, NUM_OUTPUTS, NUM_LIGHTS);
+
+ configInput(AUDIO_INPUT, "Audio");
+ configOutput(AUDIO_OUTPUT, "Audio");
+ configParam(PARAM_INPUT_LEVEL, -12.f, 12.f, 0.f, "Input level", " dB");
+ configParam(PARAM_OUTPUT_LEVEL, -12.f, 12.f, 0.f, "Output level", " dB");
+
+ inlevel.setTau(1 / 30.f);
+ outlevel.setTau(1 / 30.f);
+ }
+
+ ~AidaPluginModule() override
+ {
+ delete model;
+ }
+
+ json_t* dataToJson() override
+ {
+ json_t* const rootJ = json_object();
+ DISTRHO_SAFE_ASSERT_RETURN(rootJ != nullptr, nullptr);
+
+ json_object_set_new(rootJ, "filepath", json_string(currentFile.c_str()));
+
+ return rootJ;
+ }
+
+ void dataFromJson(json_t* const rootJ) override
+ {
+ fileChanged = false;
+
+ if (json_t* const filepathJ = json_object_get(rootJ, "filepath"))
+ {
+ const char* const filepath = json_string_value(filepathJ);
+
+ if (filepath[0] != '\0')
+ {
+ currentFile = filepath;
+ fileChanged = true;
+
+ loadModelFromFile(filepath);
+ }
+ }
+
+ if (! fileChanged)
+ {
+ currentFile.clear();
+ fileChanged = true;
+ }
+ }
+
+ void loadModelFromFile(const char* const filename)
+ {
+ try {
+ std::ifstream jsonStream(filename, std::ifstream::binary);
+ loadModelFromStream(jsonStream);
+ }
+ catch (const std::exception& e) {
+ d_stderr2("Unable to load json file: %s\nError: %s", filename, e.what());
+ };
+ }
+
+ void loadModelFromStream(std::istream& jsonStream)
+ {
+ int input_size;
+ int input_skip;
+ float input_gain;
+ float output_gain;
+ nlohmann::json model_json;
+
+ try {
+ jsonStream >> model_json;
+
+ /* Understand which model type to load */
+ input_size = model_json["in_shape"].back().get();
+ if (input_size > 1) { // MAX_INPUT_SIZE
+ throw std::invalid_argument("Value for input_size not supported");
+ }
+
+ if (model_json["in_skip"].is_number()) {
+ input_skip = model_json["in_skip"].get();
+ if (input_skip > 1)
+ throw std::invalid_argument("Values for in_skip > 1 are not supported");
+ }
+ else {
+ input_skip = 0;
+ }
+
+ if (model_json["in_gain"].is_number()) {
+ input_gain = DB_CO(model_json["in_gain"].get());
+ }
+ else {
+ input_gain = 1.0f;
+ }
+
+ if (model_json["out_gain"].is_number()) {
+ output_gain = DB_CO(model_json["out_gain"].get());
+ }
+ else {
+ output_gain = 1.0f;
+ }
+ }
+ catch (const std::exception& e) {
+ d_stderr2("Unable to load json, error: %s", e.what());
+ return;
+ }
+
+ std::unique_ptr newmodel = std::make_unique();
+
+ try {
+ if (! custom_model_creator (model_json, newmodel->variant))
+ throw std::runtime_error ("Unable to identify a known model architecture!");
+
+ std::visit (
+ [&model_json] (auto&& custom_model)
+ {
+ using ModelType = std::decay_t;
+ if constexpr (! std::is_same_v)
+ {
+ custom_model.parseJson (model_json, true);
+ custom_model.reset();
+ }
+ },
+ newmodel->variant);
+ }
+ catch (const std::exception& e) {
+ d_stderr2("Error loading model: %s", e.what());
+ return;
+ }
+
+ // save extra info
+ newmodel->input_skip = input_skip != 0;
+ newmodel->input_gain = input_gain;
+ newmodel->output_gain = output_gain;
+
+ // Pre-buffer to avoid "clicks" during initialization
+ float out[2048] = {};
+ applyModel(newmodel.get(), out, ARRAY_SIZE(out));
+
+ // swap active model
+ DynamicModel* const oldmodel = model;
+ model = newmodel.release();
+
+ // if processing, wait for process cycle to complete
+ while (oldmodel != nullptr && activeModel.load())
+ d_msleep(1);
+
+ delete oldmodel;
+ }
+
+ void process(const ProcessArgs& args) override
+ {
+ const float stime = args.sampleTime;
+ const float inlevelv = DB_CO(params[PARAM_INPUT_LEVEL].getValue());
+ const float outlevelv = DB_CO(params[PARAM_OUTPUT_LEVEL].getValue());
+
+ // High frequencies roll-off (lowpass)
+ float sample = in_lpf.process(inputs[AUDIO_INPUT].getVoltage() * 0.1f) * inlevel.process(stime, inlevelv);
+
+ // run model
+ if (model != nullptr)
+ {
+ activeModel.store(true);
+ sample = applyModel(model, sample);
+ activeModel.store(false);
+ }
+
+ // DC blocker filter (highpass)
+ outputs[AUDIO_OUTPUT].setVoltage(dc_blocker.process(sample) * outlevel.process(stime, outlevelv) * 10.f);
+ }
+
+ void onSampleRateChange(const SampleRateChangeEvent& e) override
+ {
+ dc_blocker.setFc(35.0f / e.sampleRate);
+
+ in_lpf.setFc(MAP(66.216f, 0.0f, 100.0f, INLPF_MAX_CO, INLPF_MIN_CO));
+ }
+
+ DISTRHO_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(AidaPluginModule)
+};
+
+// --------------------------------------------------------------------------------------------------------------------
+
+#ifndef HEADLESS
+struct AidaModelListWidget : ImGuiWidget {
+ AidaPluginModule* const module;
+
+ /*
+ bool showError = false;
+ String errorMessage;
+ */
+
+ struct ghcFile {
+ std::string full, base;
+ bool operator<(const ghcFile& other) const noexcept { return base < other.base; }
+ };
+ std::string currentDirectory;
+ std::vector currentFiles;
+ size_t selectedFile = (size_t)-1;
+
+ AidaModelListWidget(AidaPluginModule* const m)
+ : ImGuiWidget(),
+ module(m)
+ {
+ if (module->fileChanged)
+ reloadDir();
+ }
+
+ void drawImGui() override
+ {
+ const float scaleFactor = getScaleFactor();
+
+ const int flags = ImGuiWindowFlags_NoSavedSettings
+ | ImGuiWindowFlags_NoTitleBar
+ | ImGuiWindowFlags_NoResize
+ | ImGuiWindowFlags_NoCollapse
+ | ImGuiWindowFlags_NoScrollbar
+ | ImGuiWindowFlags_NoScrollWithMouse;
+
+ ImGui::SetNextWindowPos(ImVec2(0, 0));
+ ImGui::SetNextWindowSize(ImVec2(box.size.x * scaleFactor, box.size.y * scaleFactor));
+
+ if (ImGui::Begin("Model File List", nullptr, ImGuiWindowFlags_NoTitleBar|ImGuiWindowFlags_NoResize))
+ {
+ /*
+ if (showError)
+ {
+ showError = false;
+ ImGui::OpenPopup("Audio File Error");
+ }
+
+ if (ImGui::BeginPopupModal("Model File Error", nullptr, flags))
+ {
+ ImGui::TextWrapped("Failed to load model file, error was:\n%s", errorMessage.buffer());
+
+ ImGui::Separator();
+
+ if (ImGui::Button("Ok"))
+ ImGui::CloseCurrentPopup();
+
+ ImGui::EndPopup();
+ }
+ else
+ */
+ if (ImGui::BeginTable("modellist", 1, ImGuiTableFlags_NoSavedSettings))
+ {
+ for (size_t i=0, count=currentFiles.size(); i < count; ++i)
+ {
+ bool wasSelected = selectedFile == i;
+ bool selected = wasSelected;
+ ImGui::TableNextRow();
+ ImGui::TableSetColumnIndex(0);
+ ImGui::Selectable(currentFiles[i].base.c_str(), &selected);
+
+ if (selected && ! wasSelected)
+ {
+ selectedFile = i;
+ module->currentFile = currentFiles[i].full;
+ module->loadModelFromFile(currentFiles[i].full.c_str());
+ }
+ }
+
+ ImGui::EndTable();
+ }
+ }
+
+ ImGui::End();
+ }
+
+ void step() override
+ {
+ if (module->fileChanged)
+ reloadDir();
+
+ ImGuiWidget::step();
+ }
+
+ void reloadDir()
+ {
+ module->fileChanged = false;
+
+ currentFiles.clear();
+ selectedFile = (size_t)-1;
+
+ static constexpr const char* const supportedExtensions[] = {
+ ".json"
+ };
+
+ using namespace ghc::filesystem;
+ const path currentFile = u8path(module->currentFile);
+ currentDirectory = currentFile.parent_path().generic_u8string();
+
+ directory_iterator it;
+
+ try {
+ it = directory_iterator(u8path(currentDirectory));
+ } DISTRHO_SAFE_EXCEPTION_RETURN("Failed to open current directory",);
+
+ for (directory_iterator itb = begin(it), ite=end(it); itb != ite; ++itb)
+ {
+ if (! itb->is_regular_file())
+ continue;
+ const path filepath = itb->path();
+ const path extension = filepath.extension();
+ for (size_t i=0; iopacity = 0;
+ setSvg(APP->window->loadSvg(asset::plugin(pluginInstance, "res/aida-x-knob.svg")));
+ }
+};
+
+struct AidaWidget : ModuleWidgetWithSideScrews<23> {
+ static constexpr const float previewBoxHeight = 80.0f;
+ static constexpr const float previewBoxBottom = 20.0f;
+ static constexpr const float previewBoxRect[] = {8.0f,
+ 380.0f - previewBoxHeight - previewBoxBottom,
+ 15.0f * 23 - 16.0f,
+ previewBoxHeight};
+ static constexpr const float startY_list = startY - 2.0f;
+ static constexpr const float fileListHeight = 380.0f - startY_list - previewBoxHeight - previewBoxBottom * 1.5f;
+ static constexpr const float startY_preview = startY_list + fileListHeight;
+
+ AidaPluginModule* const module;
+
+ AidaWidget(AidaPluginModule* const m)
+ : module(m)
+ {
+ setModule(module);
+ setPanel(APP->window->loadSvg(asset::plugin(pluginInstance, "res/AIDA-X.svg")));
+
+ createAndAddScrews();
+
+ addInput(createInput(Vec(startX_In, 25), module, 0));
+ addOutput(createOutput(Vec(startX_Out, 25), module, 0));
+
+ addChild(createParamCentered(Vec(box.size.x * 0.5f - 50, box.size.y - 60),
+ module, AidaPluginModule::PARAM_INPUT_LEVEL));
+
+ addChild(createParamCentered(Vec(box.size.x * 0.5f + 50, box.size.y - 60),
+ module, AidaPluginModule::PARAM_OUTPUT_LEVEL));
+
+ if (m != nullptr)
+ {
+ AidaModelListWidget* const listw = new AidaModelListWidget(m);
+ listw->box.pos = Vec(0, startY_list);
+ listw->box.size = Vec(box.size.x, fileListHeight);
+ addChild(listw);
+ }
+ }
+
+ void draw(const DrawArgs& args) override
+ {
+ drawBackground(args.vg);
+ drawOutputJacksArea(args.vg);
+
+ ModuleWidget::draw(args);
+ }
+
+ void drawOutputJacksArea(NVGcontext* const vg)
+ {
+ nvgBeginPath(vg);
+ nvgRoundedRect(vg, startX_Out - 2.5f, startY_list * 0.5f - padding * 0.5f, padding, padding, 4);
+ nvgFillColor(vg, nvgRGB(0xd0, 0xd0, 0xd0));
+ nvgFill(vg);
+ }
+
+ void appendContextMenu(ui::Menu* const menu) override
+ {
+ menu->addChild(new ui::MenuSeparator);
+
+ struct LoadModelFileItem : MenuItem {
+ AidaPluginModule* const module;
+
+ LoadModelFileItem(AidaPluginModule* const m)
+ : module(m)
+ {
+ text = "Load model file...";
+ }
+
+ void onAction(const event::Action&) override
+ {
+ AidaPluginModule* const module = this->module;
+ async_dialog_filebrowser(false, nullptr, nullptr, text.c_str(), [module](char* path)
+ {
+ if (path == nullptr)
+ return;
+
+ module->currentFile = path;
+ module->fileChanged = true;
+ module->loadModelFromFile(path);
+ std::free(path);
+ });
+ }
+ };
+
+ menu->addChild(new LoadModelFileItem(module));
+ }
+
+ DISTRHO_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(AidaWidget)
+};
+#else
+struct AidaWidget : ModuleWidget {
+ AidaWidget(AidaPluginModule* const module) {
+ setModule(module);
+
+ addInput(createInput({}, module, 0));
+ addOutput(createOutput({}, module, 0));
+ }
+};
+#endif
+
+// --------------------------------------------------------------------------------------------------------------------
+
+Model* modelAidaX = createModel("AIDA-X");
+
+// --------------------------------------------------------------------------------------------------------------------
diff --git a/plugins/Cardinal/src/AIDA-X/Biquad.cpp b/plugins/Cardinal/src/AIDA-X/Biquad.cpp
new file mode 100644
index 0000000..d01e29b
--- /dev/null
+++ b/plugins/Cardinal/src/AIDA-X/Biquad.cpp
@@ -0,0 +1,165 @@
+//
+// Biquad.cpp
+//
+// Created by Nigel Redmon on 11/24/12
+// EarLevel Engineering: earlevel.com
+// Copyright 2012 Nigel Redmon
+//
+// For a complete explanation of the Biquad code:
+// http://www.earlevel.com/main/2012/11/26/biquad-c-source-code/
+//
+// License:
+//
+// This source code is provided as is, without warranty.
+// You may copy and distribute verbatim copies of this document.
+// You may modify and use this source code to create binary code
+// for your own purposes, free or commercial.
+//
+
+#include
+#include "Biquad.h"
+
+Biquad::Biquad() {
+ type = bq_type_lowpass;
+ a0 = 1.0;
+ a1 = a2 = b1 = b2 = 0.0;
+ Fc = 0.50;
+ Q = 0.707;
+ peakGain = 0.0;
+ z1 = z2 = 0.0;
+}
+
+Biquad::Biquad(int type, double Fc, double Q, double peakGainDB) {
+ setBiquad(type, Fc, Q, peakGainDB);
+ z1 = z2 = 0.0;
+}
+
+Biquad::~Biquad() {
+}
+
+void Biquad::setType(int type) {
+ this->type = type;
+ calcBiquad();
+}
+
+void Biquad::setQ(double Q) {
+ this->Q = Q;
+ calcBiquad();
+}
+
+void Biquad::setFc(double Fc) {
+ this->Fc = Fc;
+ calcBiquad();
+}
+
+void Biquad::setPeakGain(double peakGainDB) {
+ this->peakGain = peakGainDB;
+ calcBiquad();
+}
+
+void Biquad::setBiquad(int type, double Fc, double Q, double peakGainDB) {
+ this->type = type;
+ this->Q = Q;
+ this->Fc = Fc;
+ setPeakGain(peakGainDB);
+}
+
+void Biquad::calcBiquad(void) {
+ double norm;
+ double V = pow(10, fabs(peakGain) / 20.0);
+ double K = tan(M_PI * Fc);
+ switch (this->type) {
+ case bq_type_lowpass:
+ norm = 1 / (1 + K / Q + K * K);
+ a0 = K * K * norm;
+ a1 = 2 * a0;
+ a2 = a0;
+ b1 = 2 * (K * K - 1) * norm;
+ b2 = (1 - K / Q + K * K) * norm;
+ break;
+
+ case bq_type_highpass:
+ norm = 1 / (1 + K / Q + K * K);
+ a0 = 1 * norm;
+ a1 = -2 * a0;
+ a2 = a0;
+ b1 = 2 * (K * K - 1) * norm;
+ b2 = (1 - K / Q + K * K) * norm;
+ break;
+
+ case bq_type_bandpass:
+ norm = 1 / (1 + K / Q + K * K);
+ a0 = K / Q * norm;
+ a1 = 0;
+ a2 = -a0;
+ b1 = 2 * (K * K - 1) * norm;
+ b2 = (1 - K / Q + K * K) * norm;
+ break;
+
+ case bq_type_notch:
+ norm = 1 / (1 + K / Q + K * K);
+ a0 = (1 + K * K) * norm;
+ a1 = 2 * (K * K - 1) * norm;
+ a2 = a0;
+ b1 = a1;
+ b2 = (1 - K / Q + K * K) * norm;
+ break;
+
+ case bq_type_peak:
+ if (peakGain >= 0) { // boost
+ norm = 1 / (1 + 1/Q * K + K * K);
+ a0 = (1 + V/Q * K + K * K) * norm;
+ a1 = 2 * (K * K - 1) * norm;
+ a2 = (1 - V/Q * K + K * K) * norm;
+ b1 = a1;
+ b2 = (1 - 1/Q * K + K * K) * norm;
+ }
+ else { // cut
+ norm = 1 / (1 + V/Q * K + K * K);
+ a0 = (1 + 1/Q * K + K * K) * norm;
+ a1 = 2 * (K * K - 1) * norm;
+ a2 = (1 - 1/Q * K + K * K) * norm;
+ b1 = a1;
+ b2 = (1 - V/Q * K + K * K) * norm;
+ }
+ break;
+ case bq_type_lowshelf:
+ if (peakGain >= 0) { // boost
+ norm = 1 / (1 + sqrt(2) * K + K * K);
+ a0 = (1 + sqrt(2*V) * K + V * K * K) * norm;
+ a1 = 2 * (V * K * K - 1) * norm;
+ a2 = (1 - sqrt(2*V) * K + V * K * K) * norm;
+ b1 = 2 * (K * K - 1) * norm;
+ b2 = (1 - sqrt(2) * K + K * K) * norm;
+ }
+ else { // cut
+ norm = 1 / (1 + sqrt(2*V) * K + V * K * K);
+ a0 = (1 + sqrt(2) * K + K * K) * norm;
+ a1 = 2 * (K * K - 1) * norm;
+ a2 = (1 - sqrt(2) * K + K * K) * norm;
+ b1 = 2 * (V * K * K - 1) * norm;
+ b2 = (1 - sqrt(2*V) * K + V * K * K) * norm;
+ }
+ break;
+ case bq_type_highshelf:
+ if (peakGain >= 0) { // boost
+ norm = 1 / (1 + sqrt(2) * K + K * K);
+ a0 = (V + sqrt(2*V) * K + K * K) * norm;
+ a1 = 2 * (K * K - V) * norm;
+ a2 = (V - sqrt(2*V) * K + K * K) * norm;
+ b1 = 2 * (K * K - 1) * norm;
+ b2 = (1 - sqrt(2) * K + K * K) * norm;
+ }
+ else { // cut
+ norm = 1 / (V + sqrt(2*V) * K + K * K);
+ a0 = (1 + sqrt(2) * K + K * K) * norm;
+ a1 = 2 * (K * K - 1) * norm;
+ a2 = (1 - sqrt(2) * K + K * K) * norm;
+ b1 = 2 * (K * K - V) * norm;
+ b2 = (V - sqrt(2*V) * K + K * K) * norm;
+ }
+ break;
+ }
+
+ return;
+}
diff --git a/plugins/Cardinal/src/AIDA-X/Biquad.h b/plugins/Cardinal/src/AIDA-X/Biquad.h
new file mode 100644
index 0000000..ee1b955
--- /dev/null
+++ b/plugins/Cardinal/src/AIDA-X/Biquad.h
@@ -0,0 +1,60 @@
+//
+// Biquad.h
+//
+// Created by Nigel Redmon on 11/24/12
+// EarLevel Engineering: earlevel.com
+// Copyright 2012 Nigel Redmon
+//
+// For a complete explanation of the Biquad code:
+// http://www.earlevel.com/main/2012/11/26/biquad-c-source-code/
+//
+// License:
+//
+// This source code is provided as is, without warranty.
+// You may copy and distribute verbatim copies of this document.
+// You may modify and use this source code to create binary code
+// for your own purposes, free or commercial.
+//
+
+#ifndef Biquad_h
+#define Biquad_h
+
+enum {
+ bq_type_lowpass = 0,
+ bq_type_highpass,
+ bq_type_bandpass,
+ bq_type_notch,
+ bq_type_peak,
+ bq_type_lowshelf,
+ bq_type_highshelf
+};
+
+class Biquad {
+public:
+ Biquad();
+ Biquad(int type, double Fc, double Q, double peakGainDB);
+ ~Biquad();
+ void setType(int type);
+ void setQ(double Q);
+ void setFc(double Fc);
+ void setPeakGain(double peakGainDB);
+ void setBiquad(int type, double Fc, double Q, double peakGainDB);
+ float process(float in);
+
+protected:
+ void calcBiquad(void);
+
+ int type;
+ double a0, a1, a2, b1, b2;
+ double Fc, Q, peakGain;
+ double z1, z2;
+};
+
+inline float Biquad::process(float in) {
+ double out = in * a0 + z1;
+ z1 = in * a1 + z2 - b1 * out;
+ z2 = in * a2 - b2 * out;
+ return out;
+}
+
+#endif // Biquad_h
diff --git a/plugins/Cardinal/src/AIDA-X/RTNeural b/plugins/Cardinal/src/AIDA-X/RTNeural
new file mode 160000
index 0000000..74e9d35
--- /dev/null
+++ b/plugins/Cardinal/src/AIDA-X/RTNeural
@@ -0,0 +1 @@
+Subproject commit 74e9d354937346f31858e976a2eefc1c25cdcccd
diff --git a/plugins/Cardinal/src/AIDA-X/model_variant.hpp b/plugins/Cardinal/src/AIDA-X/model_variant.hpp
new file mode 100644
index 0000000..b730591
--- /dev/null
+++ b/plugins/Cardinal/src/AIDA-X/model_variant.hpp
@@ -0,0 +1,683 @@
+#include <RTNeural/RTNeural.h>
+#include <variant>
+
+#define MAX_INPUT_SIZE 3
+struct NullModel { static constexpr int input_size = 0; static constexpr int output_size = 0; };
+using ModelType_GRU_8_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 8>, RTNeural::DenseT<float, 8, 1>>;
+using ModelType_GRU_8_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 8>, RTNeural::DenseT<float, 8, 1>>;
+using ModelType_GRU_8_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 8>, RTNeural::DenseT<float, 8, 1>>;
+using ModelType_GRU_12_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 12>, RTNeural::DenseT<float, 12, 1>>;
+using ModelType_GRU_12_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 12>, RTNeural::DenseT<float, 12, 1>>;
+using ModelType_GRU_12_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 12>, RTNeural::DenseT<float, 12, 1>>;
+using ModelType_GRU_16_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 16>, RTNeural::DenseT<float, 16, 1>>;
+using ModelType_GRU_16_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 16>, RTNeural::DenseT<float, 16, 1>>;
+using ModelType_GRU_16_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 16>, RTNeural::DenseT<float, 16, 1>>;
+using ModelType_GRU_20_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 20>, RTNeural::DenseT<float, 20, 1>>;
+using ModelType_GRU_20_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 20>, RTNeural::DenseT<float, 20, 1>>;
+using ModelType_GRU_20_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 20>, RTNeural::DenseT<float, 20, 1>>;
+using ModelType_GRU_32_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 32>, RTNeural::DenseT<float, 32, 1>>;
+using ModelType_GRU_32_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 32>, RTNeural::DenseT<float, 32, 1>>;
+using ModelType_GRU_32_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 32>, RTNeural::DenseT<float, 32, 1>>;
+using ModelType_GRU_40_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 40>, RTNeural::DenseT<float, 40, 1>>;
+using ModelType_GRU_40_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 40>, RTNeural::DenseT<float, 40, 1>>;
+using ModelType_GRU_40_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 40>, RTNeural::DenseT<float, 40, 1>>;
+using ModelType_GRU_64_1 = RTNeural::ModelT<float, 1, 1, RTNeural::GRULayerT<float, 1, 64>, RTNeural::DenseT<float, 64, 1>>;
+using ModelType_GRU_64_2 = RTNeural::ModelT<float, 2, 1, RTNeural::GRULayerT<float, 2, 64>, RTNeural::DenseT<float, 64, 1>>;
+using ModelType_GRU_64_3 = RTNeural::ModelT<float, 3, 1, RTNeural::GRULayerT<float, 3, 64>, RTNeural::DenseT<float, 64, 1>>;
+using ModelType_LSTM_8_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 8>, RTNeural::DenseT<float, 8, 1>>;
+using ModelType_LSTM_8_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 8>, RTNeural::DenseT<float, 8, 1>>;
+using ModelType_LSTM_8_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 8>, RTNeural::DenseT<float, 8, 1>>;
+using ModelType_LSTM_12_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 12>, RTNeural::DenseT<float, 12, 1>>;
+using ModelType_LSTM_12_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 12>, RTNeural::DenseT<float, 12, 1>>;
+using ModelType_LSTM_12_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 12>, RTNeural::DenseT<float, 12, 1>>;
+using ModelType_LSTM_16_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 16>, RTNeural::DenseT<float, 16, 1>>;
+using ModelType_LSTM_16_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 16>, RTNeural::DenseT<float, 16, 1>>;
+using ModelType_LSTM_16_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 16>, RTNeural::DenseT<float, 16, 1>>;
+using ModelType_LSTM_20_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 20>, RTNeural::DenseT<float, 20, 1>>;
+using ModelType_LSTM_20_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 20>, RTNeural::DenseT<float, 20, 1>>;
+using ModelType_LSTM_20_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 20>, RTNeural::DenseT<float, 20, 1>>;
+using ModelType_LSTM_32_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 32>, RTNeural::DenseT<float, 32, 1>>;
+using ModelType_LSTM_32_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 32>, RTNeural::DenseT<float, 32, 1>>;
+using ModelType_LSTM_32_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 32>, RTNeural::DenseT<float, 32, 1>>;
+using ModelType_LSTM_40_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 40>, RTNeural::DenseT<float, 40, 1>>;
+using ModelType_LSTM_40_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 40>, RTNeural::DenseT<float, 40, 1>>;
+using ModelType_LSTM_40_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 40>, RTNeural::DenseT<float, 40, 1>>;
+using ModelType_LSTM_64_1 = RTNeural::ModelT<float, 1, 1, RTNeural::LSTMLayerT<float, 1, 64>, RTNeural::DenseT<float, 64, 1>>;
+using ModelType_LSTM_64_2 = RTNeural::ModelT<float, 2, 1, RTNeural::LSTMLayerT<float, 2, 64>, RTNeural::DenseT<float, 64, 1>>;
+using ModelType_LSTM_64_3 = RTNeural::ModelT<float, 3, 1, RTNeural::LSTMLayerT<float, 3, 64>, RTNeural::DenseT<float, 64, 1>>;
+using ModelVariantType = std::variant<NullModel, ModelType_GRU_8_1, ModelType_GRU_8_2, ModelType_GRU_8_3, ModelType_GRU_12_1, ModelType_GRU_12_2, ModelType_GRU_12_3, ModelType_GRU_16_1, ModelType_GRU_16_2, ModelType_GRU_16_3, ModelType_GRU_20_1, ModelType_GRU_20_2, ModelType_GRU_20_3, ModelType_GRU_32_1, ModelType_GRU_32_2, ModelType_GRU_32_3, ModelType_GRU_40_1, ModelType_GRU_40_2, ModelType_GRU_40_3, ModelType_GRU_64_1, ModelType_GRU_64_2, ModelType_GRU_64_3, ModelType_LSTM_8_1, ModelType_LSTM_8_2, ModelType_LSTM_8_3, ModelType_LSTM_12_1, ModelType_LSTM_12_2, ModelType_LSTM_12_3, ModelType_LSTM_16_1, ModelType_LSTM_16_2, ModelType_LSTM_16_3, ModelType_LSTM_20_1, ModelType_LSTM_20_2, ModelType_LSTM_20_3, ModelType_LSTM_32_1, ModelType_LSTM_32_2, ModelType_LSTM_32_3, ModelType_LSTM_40_1, ModelType_LSTM_40_2, ModelType_LSTM_40_3, ModelType_LSTM_64_1, ModelType_LSTM_64_2, ModelType_LSTM_64_3>;
+
+inline bool is_model_type_ModelType_GRU_8_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 8;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_8_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 8;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_8_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 8;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_12_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 12;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_12_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 12;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_12_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 12;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_16_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 16;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_16_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 16;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_16_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 16;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_20_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 20;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_20_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 20;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_20_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 20;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_32_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 32;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_32_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 32;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_32_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 32;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_40_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 40;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_40_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 40;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_40_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 40;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_64_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 64;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_64_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 64;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_GRU_64_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "gru";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 64;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_8_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 8;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_8_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 8;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_8_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 8;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_12_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 12;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_12_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 12;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_12_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 12;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_16_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 16;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_16_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 16;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_16_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 16;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_20_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 20;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_20_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 20;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_20_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 20;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_32_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 32;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_32_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 32;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_32_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 32;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_40_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 40;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_40_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 40;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_40_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 40;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_64_1 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 64;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 1;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_64_2 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 64;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 2;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool is_model_type_ModelType_LSTM_64_3 (const nlohmann::json& model_json) {
+ const auto json_layers = model_json.at ("layers");
+ const auto rnn_layer_type = json_layers.at (0).at ("type").get<std::string>();
+ const auto is_layer_type_correct = rnn_layer_type == "lstm";
+ const auto hidden_size = json_layers.at (0).at ("shape").back().get<int>();
+ const auto is_hidden_size_correct = hidden_size == 64;
+ const auto input_size = model_json.at ("in_shape").back().get<int>();
+ const auto is_input_size_correct = input_size == 3;
+ return is_layer_type_correct && is_hidden_size_correct && is_input_size_correct;
+}
+
+inline bool custom_model_creator (const nlohmann::json& model_json, ModelVariantType& model) {
+ if (is_model_type_ModelType_GRU_8_1 (model_json)) {
+ model.emplace<ModelType_GRU_8_1>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_8_2 (model_json)) {
+ model.emplace<ModelType_GRU_8_2>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_8_3 (model_json)) {
+ model.emplace<ModelType_GRU_8_3>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_12_1 (model_json)) {
+ model.emplace<ModelType_GRU_12_1>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_12_2 (model_json)) {
+ model.emplace<ModelType_GRU_12_2>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_12_3 (model_json)) {
+ model.emplace<ModelType_GRU_12_3>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_16_1 (model_json)) {
+ model.emplace<ModelType_GRU_16_1>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_16_2 (model_json)) {
+ model.emplace<ModelType_GRU_16_2>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_16_3 (model_json)) {
+ model.emplace<ModelType_GRU_16_3>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_20_1 (model_json)) {
+ model.emplace<ModelType_GRU_20_1>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_20_2 (model_json)) {
+ model.emplace<ModelType_GRU_20_2>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_20_3 (model_json)) {
+ model.emplace<ModelType_GRU_20_3>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_32_1 (model_json)) {
+ model.emplace<ModelType_GRU_32_1>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_32_2 (model_json)) {
+ model.emplace<ModelType_GRU_32_2>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_32_3 (model_json)) {
+ model.emplace<ModelType_GRU_32_3>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_40_1 (model_json)) {
+ model.emplace<ModelType_GRU_40_1>();
+ return true;
+ }
+ else if (is_model_type_ModelType_GRU_40_2 (model_json)) {
+ model.emplace