diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..ea1006571 --- /dev/null +++ b/.gitignore @@ -0,0 +1,101 @@ +# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,windows,linux +# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,macos,windows,linux + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### macOS Patch ### +# iCloud generated files +*.icloud + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# Environment variables +.env +.venv + +# End of 
https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,windows,linux \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 000000000..c8326a571 --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# Gloo lang + +A DSL for AI diff --git a/cli/.gitignore b/cli/.gitignore new file mode 100644 index 000000000..097d2d45f --- /dev/null +++ b/cli/.gitignore @@ -0,0 +1,20 @@ +# Created by https://www.toptal.com/developers/gitignore/api/rust +# Edit at https://www.toptal.com/developers/gitignore?templates=rust + +### Rust ### +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + +# End of https://www.toptal.com/developers/gitignore/api/rust \ No newline at end of file diff --git a/cli/Cargo.toml b/cli/Cargo.toml new file mode 100644 index 000000000..2a64a42a9 --- /dev/null +++ b/cli/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "gloo" +version = "0.2.4" +edition = "2021" +build = "build.rs" + +[dependencies] +walkdir = "2" +libc = "0.2" +clap = "2.33" +log = "0.4" +env_logger = "0.9" +thiserror = "1.0" +colored = "2.0" +pretty_env_logger = "0.5" +yaml-rust = "*" +semver = "1.0.18" + +[build-dependencies] +cc = "1.0" +walkdir = "2" +which = "4.0" diff --git a/cli/build.rs b/cli/build.rs new file mode 100644 index 000000000..c25db4055 --- /dev/null +++ b/cli/build.rs @@ -0,0 +1,57 @@ +extern crate cc; +extern crate which; + +use std::env; +use std::path::Path; +use walkdir::WalkDir; +use which::which; + +// Build the C++ code into a static library +fn main() { + // Check if ccache is available on the system + if let Ok(ccache_path) = which("ccache") { + 
env::set_var("CC", &ccache_path); + // print out the path to ccache + println!("cargo:warning=Using ccache at {}", ccache_path.display()); + } + + let mut build = cc::Build::new(); + build.cpp(true).warnings(true); + + let cpp_path = Path::new("cpp_src"); + if cpp_path.exists() && cpp_path.is_dir() { + for entry in WalkDir::new(cpp_path) { + let entry = entry.unwrap(); + let path = entry.path(); + if path.is_file() && path.extension().unwrap_or_default() == "cc" { + build.file(path); + } + } + } + + #[cfg(debug_assertions)] + build.flag_if_supported("-O0").flag_if_supported("-g"); + + #[cfg(not(debug_assertions))] + build.flag_if_supported("-O2"); + + build.include(cpp_path); + + // Determine if we're targeting MSVC + let target = env::var("TARGET").unwrap(); + if target.contains("msvc") { + // Flags for MSVC + build.flag("/W4").flag("/WX").flag("/std:c++20").flag("/EHsc"); + } else { + // If mac, set MACOSX_DEPLOYMENT_TARGET: 11.0 + if target.contains("apple") { + println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=11.0"); + } + // Flags for GCC/Clang + build.flag("-Wall").flag("-Wextra").flag("-Werror").flag("-std=c++2a"); + } + + build.compile("program"); + + println!("cargo:rerun-if-changed=cpp_src/"); +} diff --git a/cli/cpp_src/recieve_data.cc b/cli/cpp_src/recieve_data.cc new file mode 100644 index 000000000..dac339287 --- /dev/null +++ b/cli/cpp_src/recieve_data.cc @@ -0,0 +1,113 @@ +#include +#include +#include +#include +#include + +#include "variant/ast/ast.h" +#include "variant/ast/utils.h" +#include "variant/error.h" +#include "variant/generate/dir_writer.h" +#include "variant/post_process/dependency_graph.h" +#include "variant/post_process/validate.h" +#include "variant/tokenizer/tokenizer.h" + +void generate(const std::string &out_dir, + const std::map &file_map) { + std::unordered_map file_nodes; + for (const auto &pair : file_map) { + auto tokens = gloo::Tokenizer::Tokenize(pair.first, pair.second); + auto nodes = gloo::AST::Parser(tokens); + 
file_nodes[pair.first] = nodes; + } + + // Combine all the nodes into one AST + gloo::AST::Nodes nodes; + for (auto pair : file_nodes) { + for (auto &item : pair.second.enums) { + nodes.enums.push_back(item); + } + + for (auto &item : pair.second.classes) { + nodes.classes.push_back(item); + } + + for (auto &item : pair.second.functions) { + nodes.functions.push_back(item); + } + + for (auto &[func, group] : pair.second.function_test_groups) { + nodes.function_test_groups[func].insert( + nodes.function_test_groups[func].end(), group.begin(), group.end()); + } + + for (auto &[func, variants] : pair.second.function_variants) { + nodes.function_variants[func].insert(nodes.function_variants[func].end(), + variants.begin(), variants.end()); + } + for (auto &item : pair.second.clients) { + nodes.clients.push_back(item); + } + } + + gloo::PostProcess::Validate(nodes); + + auto [order, deps] = gloo::PostProcess::BuildDependencyGraph(nodes); + + // Print the nodes in the order they should be processed + for (const auto &node : order) { + node->toPython(deps.at(node->uniqueName())); + } + + // Write the __init__.py files + gloo::DirectoryWriter::get().file("__init__.py"); + gloo::DirectoryWriter::get().flush(out_dir); +} + +extern "C" { +int receive_data(const char *out_dir, const char **filenames, + const char **contents, int len, char *error_msg) { + std::map file_map; + for (int i = 0; i < len; i++) { + file_map[filenames[i]] = contents[i]; + } + + try { + generate(out_dir, file_map); + return 0; + } catch (const gloo::GlooError &e) { + if (error_msg) { +// Copy the exception's error message to the provided buffer +#ifdef _WIN32 + strncpy_s(error_msg, 255, e.what().data(), 255); +#else + strncpy(error_msg, e.what().data(), 255); +#endif + error_msg[255] = '\0'; // Null-terminate just to be sure + } + return 1; // Error + } catch (const std::exception &e) { + if (error_msg) { + // Copy the exception's error message to the provided buffer +#ifdef _WIN32 + 
strncpy_s(error_msg, 255, e.what(), 255); +#else + strncpy(error_msg, e.what(), 255); +#endif + error_msg[255] = '\0'; // Null-terminate just to be sure + } + return 2; // Error + } catch (...) { + if (error_msg) { + // Copy the exception's error message to the provided buffer +#ifdef _WIN32 + strncpy_s(error_msg, 255, "Unknown error", sizeof("Unknown error")); +#else + strncpy(error_msg, "Unknown error", 255); +#endif + error_msg[255] = '\0'; // Null-terminate just to be sure + } + return 3; // Error + } +} +} diff --git a/cli/cpp_src/variant/ast/ast.cc b/cli/cpp_src/variant/ast/ast.cc new file mode 100644 index 000000000..edb678cb0 --- /dev/null +++ b/cli/cpp_src/variant/ast/ast.cc @@ -0,0 +1,51 @@ +#include "variant/ast/ast.h" + +#include +#include + +#include "variant/ast/utils.h" + +namespace gloo::AST { +using namespace Tokenizer; + +Nodes Parser(const std::vector &tokens) { + Nodes nodes; + + auto it = tokens.begin(); + while (it->kind == TokenKind::AtSymbol) { + ++it; + // Parse the tokens + switch (it->kind) { + case TokenKind::EnumKeyword: + nodes.enums.push_back(EnumNode::Parser(it)); + break; + case TokenKind::ClassKeyword: + nodes.classes.push_back(ClassNode::Parser(it)); + break; + case TokenKind::FunctionKeyword: + nodes.functions.push_back(FunctionNode::Parser(it)); + break; + case TokenKind::VariantKeyword: { + for (auto res : VariantBaseNode::Parser(it)) { + nodes.function_variants[res->functionName].push_back(res); + } + break; + } + case TokenKind::TestGroupKeyword: { + auto res = TestGroupNode::Parser(it); + nodes.function_test_groups[res->functionName].push_back(res); + break; + } + case TokenKind::ClientKeyword: + nodes.clients.push_back(LLMClientNode::Parser(it)); + break; + default: + throw SyntaxError(*it, "Unexpected token: " + it->value); + } + } + if (it->kind != TokenKind::Eof) { + throw SyntaxError(*it, "Did you forget @? 
Got: " + it->value); + } + return nodes; +} +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/ast.h b/cli/cpp_src/variant/ast/ast.h new file mode 100644 index 000000000..fd75b25ae --- /dev/null +++ b/cli/cpp_src/variant/ast/ast.h @@ -0,0 +1,23 @@ +#pragma once + +#include "variant/ast/functions/node_function.h" +#include "variant/ast/types/node_class.h" +#include "variant/ast/types/node_enum.h" +#include "variant/ast/types/node_llm_client.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +struct Nodes { + std::vector> enums; + std::vector> classes; + std::vector> functions; + std::vector> clients; + std::unordered_map>> + function_variants; + std::unordered_map>> + function_test_groups; +}; + +Nodes Parser(const std::vector &tokens); +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/functions/node_function.cc b/cli/cpp_src/variant/ast/functions/node_function.cc new file mode 100644 index 000000000..80ba25c3d --- /dev/null +++ b/cli/cpp_src/variant/ast/functions/node_function.cc @@ -0,0 +1,46 @@ +#include "variant/ast/functions/node_function.h" + +#include + +#include "variant/ast/utils.h" + +namespace gloo::AST { +using namespace Tokenizer; + +std::string FunctionNode::toString() const { + std::stringstream ss; + ss << "Function: " << name << std::endl; + ss << " Input: " << *input << std::endl; + ss << " Output: " << *output << std::endl; + return ss.str(); +} + +std::shared_ptr FunctionNode::Parser( + std::vector::const_iterator &it) { + const Tokenizer::Token &start_token = *it; + ensureTokenKind(*it++, TokenKind::FunctionKeyword); + std::string name = ParseName(it); + ensureTokenKind(*it++, TokenKind::LeftCurlyBracket); + + ensureTokenKind(*it++, Tokenizer::TokenKind::AtSymbol); + ensureTokenKind(*it++, TokenKind::InputKeyword); + auto input = TypeNode::Parser(it); + + ensureTokenKind(*it++, Tokenizer::TokenKind::AtSymbol); + ensureTokenKind(*it++, 
TokenKind::OutputKeyword); + auto output = TypeNode::Parser(it); + + ensureTokenKind(*it++, TokenKind::RightCurlyBracket); + + return std::shared_ptr( + new FunctionNode(start_token, name, input, output)); +} + +void FunctionNode::validate( + const std::unordered_set &class_names, + const std::unordered_set &enum_names) const { + input->validate(class_names, enum_names); + output->validate(class_names, enum_names); +} + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/functions/node_function.h b/cli/cpp_src/variant/ast/functions/node_function.h new file mode 100644 index 000000000..e72e6a7e9 --- /dev/null +++ b/cli/cpp_src/variant/ast/functions/node_function.h @@ -0,0 +1,63 @@ +#pragma once +#include +#include +#include +#include +#include + +#include "variant/ast/functions/tests/test.h" +#include "variant/ast/functions/variants/variant_base.h" +#include "variant/ast/node.h" +#include "variant/ast/shared/node_type.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +class FunctionNode : public OutputNode { + public: + FunctionNode(const Tokenizer::Token token, const std::string &name, + const std::shared_ptr &input, + const std::shared_ptr &output) + : OutputNode(token, name), input(input), output(output) {} + NodeOrder order() const { return NodeOrder::FUNCTION; } + PYTHONIC(); + + void link(const std::vector> &classes, + const std::vector> &enums) { + input->link(classes, enums); + output->link(classes, enums); + } + + const std::shared_ptr input; + const std::shared_ptr output; + std::vector> variants; + std::vector> test_groups; + + std::string toString() const; + + void validate(const std::unordered_set &class_names, + const std::unordered_set &enum_names) const; + + void addVariant(std::shared_ptr node) { + variants.push_back(node); + node->function = this; + } + void addTestGroup(std::shared_ptr node) { + test_groups.push_back(node); + node->function = this; + } + + static std::shared_ptr Parser( + 
std::vector::const_iterator &it); + + std::vector dependencies() const { + std::vector deps; + auto input_deps = input->dependencies(); + deps.insert(deps.end(), input_deps.begin(), input_deps.end()); + auto output_deps = output->dependencies(); + deps.insert(deps.end(), output_deps.begin(), output_deps.end()); + return deps; + } +}; + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/functions/tests/test.cc b/cli/cpp_src/variant/ast/functions/tests/test.cc new file mode 100644 index 000000000..bbb7e3776 --- /dev/null +++ b/cli/cpp_src/variant/ast/functions/tests/test.cc @@ -0,0 +1,87 @@ +#include "variant/ast/functions/tests/test.h" + +#include "variant/ast/utils.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +std::shared_ptr TestGroupNode::Parser( + std::vector::const_iterator &it) { + const auto &start_token = *it; + ensureTokenKind(*it++, Tokenizer::TokenKind::TestGroupKeyword); + const auto &name = ParseName(it); + const auto &forToken = *it; + const auto &forKeyword = ParseName(it); + if (forKeyword != "for") { + throw SyntaxError(forToken, "Expected 'for' keyword. 
Got: " + forKeyword); + } + const auto &functionName = ParseName(it); + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + + std::vector> cases; + std::vector> methods; + while (it->kind == Tokenizer::TokenKind::AtSymbol) { + ++it; + switch (it->kind) { + case Tokenizer::TokenKind::TestCaseKeyword: + cases.push_back(TestCaseNode::Parser(cases.size(), it)); + break; + case Tokenizer::TokenKind::InputKeyword: { + const auto &token = *it; + ensureTokenKind(*it++, Tokenizer::TokenKind::InputKeyword); + auto value = ParseString(it); + cases.push_back(std::shared_ptr(new TestCaseNode( + token, "case_" + std::to_string(cases.size()), value, {}))); + break; + } + case Tokenizer::TokenKind::MethodKeyword: + methods.push_back(MethodNode::Parser(it)); + break; + default: + throw SyntaxError( + *it, "Unexpected token parsing 'test_group': " + it->value); + } + } + + ensureTokenKind(*it++, Tokenizer::TokenKind::RightCurlyBracket); + auto group = std::shared_ptr( + new TestGroupNode(start_token, name, functionName, cases, methods)); + return group; +} + +std::shared_ptr TestCaseNode::Parser( + size_t index, std::vector::const_iterator &it) { + const auto &start_token = *it; + ensureTokenKind(*it++, Tokenizer::TokenKind::TestCaseKeyword); + std::string name = "case_" + std::to_string(index); + if (it->kind == Tokenizer::TokenKind::Identifier) { + name = it->value; + it++; + } + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + bool sawInput = false; + std::string value = ""; + std::vector> methods; + while (it->kind == Tokenizer::TokenKind::AtSymbol) { + ++it; + switch (it->kind) { + case Tokenizer::TokenKind::InputKeyword: + if (sawInput) { + throw SyntaxError(*it, "Duplicate input."); + } + sawInput = true; + value = ParseString(++it); + break; + case Tokenizer::TokenKind::MethodKeyword: + methods.push_back(MethodNode::Parser(it)); + break; + default: + throw SyntaxError(*it, "Unexpected token parsing 'case': " + it->value); + } + } + 
ensureTokenKind(*it++, Tokenizer::TokenKind::RightCurlyBracket); + return std::shared_ptr( + new TestCaseNode(start_token, name, value, methods)); +} + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/functions/tests/test.h b/cli/cpp_src/variant/ast/functions/tests/test.h new file mode 100644 index 000000000..85d9b5eaf --- /dev/null +++ b/cli/cpp_src/variant/ast/functions/tests/test.h @@ -0,0 +1,84 @@ +#pragma once +#include +#include +#include +#include + +#include "variant/ast/node.h" +#include "variant/ast/shared/node_method.h" +#include "variant/error.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +class TestCaseNode : public AstNode { + public: + TestCaseNode(const Tokenizer::Token &token, const std::string &name, + const std::string &value, + const std::vector> &methods) + : AstNode(token), name(name), value(value), methods(methods) {} + + const std::string name; + const std::string value; + const std::vector> methods; + + std::string toString() const { + std::string result = "TestCase[" + name + "]"; + return result; + } + + static std::shared_ptr Parser( + size_t index, std::vector::const_iterator &it); +}; + +class FunctionNode; + +class TestGroupNode : public OutputNode { + public: + TestGroupNode(const Tokenizer::Token &token, const std::string &name, + const std::string &functionName, + const std::vector> &cases, + const std::vector> &methods) + : OutputNode(token, name), + name(name), + functionName(functionName), + cases(cases), + methods(methods) {} + + virtual NodeOrder order() const { return NodeOrder::TEST_GROUP; } + PYTHONIC(); + + const std::string name; + const std::string functionName; + const std::vector> cases; + const std::vector> methods; + + std::string toString() const { + std::string result = functionName + "::test_group[" + name + "]"; + return result; + } + + static std::shared_ptr Parser( + std::vector::const_iterator &it); + + FunctionNode *function = nullptr; + + 
void validate(const std::unordered_set &function_names) { + if (function_names.find(functionName) == function_names.end()) { + throw SyntaxError(token, "Function not found: " + functionName); + } + std::unordered_map> names; + for (auto &test_case : cases) { + names[test_case->name].push_back(test_case->token); + } + + for (auto &pair : names) { + if (pair.second.size() > 1) { + throw DuplicateError(pair.second, + name + ": Duplicate test case: " + pair.first); + } + } + } +}; + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/functions/variants/code_variant.h b/cli/cpp_src/variant/ast/functions/variants/code_variant.h new file mode 100644 index 000000000..2cc5a28a2 --- /dev/null +++ b/cli/cpp_src/variant/ast/functions/variants/code_variant.h @@ -0,0 +1,43 @@ +#pragma once +#include + +#include "variant/ast/functions/variants/variant_base.h" +#include "variant/error.h" + +namespace gloo::AST { + +class CodeVariantNode : public VariantBaseNode { + public: + CodeVariantNode(const Tokenizer::Token &token, const std::string &name, + const std::string &functionName, + const std::vector &usedFunction, + const std::vector> &methods) + : VariantBaseNode(token, name, functionName), + usedFunction(usedFunction), + methods(methods) {} + PYTHONIC(); + + virtual std::vector dependencies() const; + + const std::vector usedFunction; + const std::vector> methods; + + virtual std::string type() const { return "code"; } + + void validate(const std::unordered_set &, + const std::unordered_set &, + const std::unordered_set &function_names, + const std::unordered_set &) const { + for (const auto &func : usedFunction) { + if (function_names.find(func) == function_names.end()) { + throw UndefinedError(token, "Dependency not found: " + func); + } + } + } + + static std::shared_ptr Parser( + const std::string &functionName, const std::string &variantName, + std::vector::const_iterator &it); +}; + +} // namespace gloo::AST \ No newline at end of file diff --git 
a/cli/cpp_src/variant/ast/functions/variants/llm_variant.h b/cli/cpp_src/variant/ast/functions/variants/llm_variant.h new file mode 100644 index 000000000..72986d655 --- /dev/null +++ b/cli/cpp_src/variant/ast/functions/variants/llm_variant.h @@ -0,0 +1,42 @@ +#pragma once +#include + +#include "variant/ast/functions/variants/variant_base.h" +#include "variant/ast/shared/node_method.h" +#include "variant/ast/shared/node_stringify.h" + +namespace gloo::AST { +class LLMVariantNode : public VariantBaseNode { + public: + LLMVariantNode(const Tokenizer::Token &token, const std::string &name, + const std::string &functionName, + const std::string &client_name, const std::string &prompt, + const std::vector> &stringify, + const std::vector> &methods) + : VariantBaseNode(token, name, functionName), + client_name(client_name), + prompt(prompt), + stringify(stringify), + methods(methods) {} + + const std::string client_name; + const std::string prompt; + const std::vector> stringify; + const std::vector> methods; + + PYTHONIC(); + virtual std::vector dependencies() const; + + virtual std::string type() const { return "llm"; } + + void validate(const std::unordered_set &, + const std::unordered_set &, + const std::unordered_set &, + const std::unordered_set &client_names) const; + + static std::vector> Parser( + const std::string &functionName, const std::string &variantName, + std::vector::const_iterator &it); +}; + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/functions/variants/variant.cc b/cli/cpp_src/variant/ast/functions/variants/variant.cc new file mode 100644 index 000000000..5364a55b3 --- /dev/null +++ b/cli/cpp_src/variant/ast/functions/variants/variant.cc @@ -0,0 +1,188 @@ +#include "variant/ast/functions/node_function.h" +#include "variant/ast/functions/variants/code_variant.h" +#include "variant/ast/functions/variants/llm_variant.h" +#include "variant/ast/functions/variants/variant_base.h" +#include 
"variant/ast/shared/node_method.h" +#include "variant/ast/shared/node_stringify.h" +#include "variant/ast/utils.h" + +namespace gloo::AST { +enum VariantType { LLM, CODE }; + +std::vector LLMVariantNode::dependencies() const { + return function->dependencies(); +} + +std::vector CodeVariantNode::dependencies() const { + std::vector deps = function->dependencies(); + deps.insert(deps.end(), usedFunction.begin(), usedFunction.end()); + return deps; +} + +VariantType getVariantType(const Tokenizer::Token& tk) { + ensureTokenKind(tk, Tokenizer::TokenKind::VariantKeyword); + // Find the variant type by looking for the word in the brackets. + auto bracket_start = tk.value.find('['); + auto bracket_end = tk.value.find(']'); + if (bracket_start == std::string::npos || bracket_end == std::string::npos) { + throw std::runtime_error("Invalid variant name: " + tk.value); + } + auto variant_type = + tk.value.substr(bracket_start + 1, bracket_end - bracket_start - 1); + if (variant_type == "llm") { + return VariantType::LLM; + } else if (variant_type == "code") { + return VariantType::CODE; + } else { + throw SyntaxError(tk, "Unknown variant type: " + variant_type); + } +} + +std::vector> VariantBaseNode::Parser( + std::vector::const_iterator& it) { + const auto type = getVariantType(*it++); + const auto& name = ParseName(it); + auto& tk = *it; + const auto& forKeyword = ParseName(it); + if (forKeyword != "for") { + throw SyntaxError(tk, "Expected 'for' keyword. 
Got: " + forKeyword); + } + const auto& function_name = ParseName(it); + std::vector> baseResult; + switch (type) { + case VariantType::LLM: + for (const auto& x : LLMVariantNode::Parser(function_name, name, it)) { + baseResult.push_back(x); + } + break; + case VariantType::CODE: + baseResult.push_back(CodeVariantNode::Parser(function_name, name, it)); + break; + default: + throw SyntaxError(*it, "Unknown variant type"); + } + + return baseResult; +} + +std::vector> LLMVariantNode::Parser( + const std::string& functionName, const std::string& variantName, + std::vector::const_iterator& it) { + const auto& start_token = *it; + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + ensureTokenKind(*it++, Tokenizer::TokenKind::AtSymbol); + ensureTokenKind(*it++, Tokenizer::TokenKind::ClientKeyword); + std::vector client_names = ParseIdentifierList(it); + std::optional prompt; + std::vector> stringify; + std::vector> methods; + while (it->kind == Tokenizer::TokenKind::AtSymbol) { + ++it; + switch (it->kind) { + case Tokenizer::TokenKind::PromptKeyword: { + ensureTokenKind(*it++, Tokenizer::TokenKind::PromptKeyword); + prompt = ParseString(it); + break; + } + case Tokenizer::TokenKind::MethodKeyword: { + methods.push_back(MethodNode::Parser(it)); + break; + } + case Tokenizer::TokenKind::StringifyKeyword: { + stringify.push_back(StringifyNode::Parser(it)); + break; + } + default: + throw SyntaxError(*it, std::string("Unexpected field: ") + it->value); + } + } + ensureTokenKind(*it++, Tokenizer::TokenKind::RightCurlyBracket); + if (!prompt.has_value()) { + throw SyntaxError(start_token, "Prompt must be specified"); + } + + if (client_names.size() == 0) { + throw SyntaxError(start_token, "At least one client must be specified"); + } + if (client_names.size() == 1) { + return {std::shared_ptr(new LLMVariantNode( + start_token, variantName, functionName, client_names[0], prompt.value(), + stringify, methods))}; + } else { + std::vector> result; + for (auto& 
client_name : client_names) { + result.push_back(std::shared_ptr(new LLMVariantNode( + start_token, variantName + "_" + client_name, functionName, + client_name, prompt.value(), stringify, methods))); + } + return result; + } +} + +std::shared_ptr CodeVariantNode::Parser( + const std::string& functionName, const std::string& variantName, + std::vector::const_iterator& it) { + std::vector usedFunction; + std::vector> methods; + const auto& start_token = *it; + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + while (it->kind == Tokenizer::TokenKind::AtSymbol) { + ++it; + switch (it->kind) { + case Tokenizer::TokenKind::DependsOnKeyword: { + ensureTokenKind(*it++, Tokenizer::TokenKind::DependsOnKeyword); + if (usedFunction.size() > 0) { + throw SyntaxError(*it, "Multiple depends_on statements"); + } + const auto deps = ParseIdentifierList(it); + usedFunction.insert(usedFunction.end(), deps.begin(), deps.end()); + break; + } + case Tokenizer::TokenKind::MethodKeyword: { + methods.push_back(MethodNode::Parser(it)); + break; + } + default: + break; + } + } + ensureTokenKind(*it++, Tokenizer::TokenKind::RightCurlyBracket); + return std::shared_ptr(new CodeVariantNode( + start_token, variantName, functionName, usedFunction, methods)); +} + +void LLMVariantNode::validate( + const std::unordered_set& class_names, + const std::unordered_set& enum_names, + const std::unordered_set&, + const std::unordered_set& client_names) const { + if (client_names.find(client_name) == client_names.end()) { + throw SyntaxError(token, "client[llm] not found: " + client_name); + } + + // Ensure stringify properties are valid + std::unordered_set property_names; + for (const auto& prop : stringify) { + if (property_names.find(prop->type_name) != property_names.end()) { + throw SyntaxError(token, + "Duplicate stringified property: " + prop->type_name); + } + if (class_names.find(prop->type_name) == class_names.end() && + enum_names.find(prop->type_name) == enum_names.end()) { + 
throw SyntaxError(token, "Stringified property must be enum or class: " + + prop->type_name); + } + property_names.insert(prop->type_name); + } + + // Ensure methods are valid + std::unordered_set method_names; + for (const auto& method : methods) { + if (method_names.find(method->name) != method_names.end()) { + throw SyntaxError(token, "Duplicate method: " + method->name); + } + method_names.insert(method->name); + } +} + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/functions/variants/variant_base.h b/cli/cpp_src/variant/ast/functions/variants/variant_base.h new file mode 100644 index 000000000..ccb3ad14e --- /dev/null +++ b/cli/cpp_src/variant/ast/functions/variants/variant_base.h @@ -0,0 +1,46 @@ +#pragma once +#include +#include +#include +#include + +#include "variant/ast/node.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +class FunctionNode; + +class VariantBaseNode : public OutputNode { + public: + VariantBaseNode(const Tokenizer::Token &token, const std::string &name, + const std::string &functionName) + : OutputNode(token, name), functionName(functionName) {} + + virtual ~VariantBaseNode() = default; + + virtual std::string type() const = 0; + + std::string toString() const { + std::string result = functionName + "::" + name + "[" + type() + "]"; + return result; + } + NodeOrder order() const { return NodeOrder::VARIANT; } + const std::string functionName; + + virtual std::vector dependencies() const { return {}; } + + static std::vector> Parser( + std::vector::const_iterator &it); + virtual std::string uniqueName() const { return functionName + "::" + name; } + + virtual void validate( + const std::unordered_set &class_names, + const std::unordered_set &enum_names, + const std::unordered_set &function_names, + const std::unordered_set &client_names) const = 0; + + FunctionNode *function; // This is a non-owning pointer +}; + +} // namespace gloo::AST diff --git 
a/cli/cpp_src/variant/ast/node.h b/cli/cpp_src/variant/ast/node.h new file mode 100644 index 000000000..63b8cb9d2 --- /dev/null +++ b/cli/cpp_src/variant/ast/node.h @@ -0,0 +1,44 @@ +#pragma once + +#include +#include + +#include "variant/generate/generate.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { +class AstNode { + public: + AstNode(const Tokenizer::Token &token) : token(token) {} + + const Tokenizer::Token token; + + virtual ~AstNode() = default; + + virtual std::string toString() const = 0; + + friend std::ostream &operator<<(std::ostream &os, const AstNode &node) { + os << node.toString(); + return os; + } +}; + +enum NodeOrder { + ENUM = 1, + CLASS, + LLM_CLIENT, + FUNCTION, + VARIANT, + TEST_GROUP, +}; + +class OutputNode : public AstNode, public Generate::PythonImpl { + public: + OutputNode(const Tokenizer::Token &token, const std::string &name) + : AstNode(token), name(name) {} + + virtual NodeOrder order() const = 0; + virtual std::string uniqueName() const { return name; } + const std::string name; +}; +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/shared/node_code.cc b/cli/cpp_src/variant/ast/shared/node_code.cc new file mode 100644 index 000000000..746d64c38 --- /dev/null +++ b/cli/cpp_src/variant/ast/shared/node_code.cc @@ -0,0 +1,36 @@ +#include "variant/ast/shared/node_code.h" + +#include + +#include "variant/ast/utils.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { +Language fromLangToken(const Tokenizer::Token &tk) { + // Find the lang type by looking for the word in the brackets. 
+ auto bracket_start = tk.value.find('['); + auto bracket_end = tk.value.rfind(']'); + if (bracket_start == std::string::npos || bracket_end == std::string::npos) { + throw std::runtime_error("Invalid language name: " + tk.value); + } + auto lang = + tk.value.substr(bracket_start + 1, bracket_end - bracket_start - 1); + + if (lang == "py") { + return Language::PYTHON; + } else if (lang == "ts") { + return Language::TYPESCRIPT; + } else { + throw std::runtime_error("Unknown language: " + lang); + } +} + +CodeNode CodeNode::Parser(std::vector::const_iterator &it) { + ensureTokenKind(*it, Tokenizer::TokenKind::Lang); + const Tokenizer::Token &start_token = *it; + Language language = fromLangToken(*it++); + std::string code = ParseString(it); + return {start_token, language, code}; +} + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/shared/node_code.h b/cli/cpp_src/variant/ast/shared/node_code.h new file mode 100644 index 000000000..ba5feaa1c --- /dev/null +++ b/cli/cpp_src/variant/ast/shared/node_code.h @@ -0,0 +1,31 @@ +#pragma once +#include + +#include "variant/ast/node.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +enum Language { + PYTHON, + TYPESCRIPT, +}; + +class CodeNode : public AstNode { + public: + CodeNode(const Tokenizer::Token token, const Language &language, + const std::string &code) + : AstNode(token), language(language), code(code) {} + + Language language; + std::string code; + + std::string toString() const { return code; } + + // TODO: do some syntax checking + void validate() const {} + + static CodeNode Parser(std::vector::const_iterator &it); +}; + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/shared/node_method.cc b/cli/cpp_src/variant/ast/shared/node_method.cc new file mode 100644 index 000000000..ed3eef910 --- /dev/null +++ b/cli/cpp_src/variant/ast/shared/node_method.cc @@ -0,0 +1,53 @@ +#include 
"variant/ast/shared/node_method.h" + +#include +#include +#include +#include + +#include "variant/ast/utils.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { +std::string MethodNode::toString() const { + std::stringstream ss; + ss << "Method: " << name << std::endl; + for (const auto &lang : langs) { + ss << lang << std::endl; + } + return ss.str(); +} + +std::shared_ptr MethodNode::Parser( + std::vector::const_iterator &it) { + ensureTokenKind(*it, Tokenizer::TokenKind::MethodKeyword); + const Tokenizer::Token &start_token = *it; + it++; + ensureTokenKind(*it, Tokenizer::TokenKind::Identifier); + std::string name = it->value; + it++; + + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + + std::vector langs; + while (it->kind == Tokenizer::TokenKind::AtSymbol) { + ++it; + langs.push_back(CodeNode::Parser(it)); + } + + ensureTokenKind(*it++, Tokenizer::TokenKind::RightCurlyBracket); + return std::shared_ptr(new MethodNode(start_token, name, langs)); +} + +void MethodNode::validate() const { + // Method should have at least one variant. 
+ if (langs.size() == 0) { + throw SyntaxError(token, "Method must have at least one lang."); + } + + for (auto &lang : langs) { + lang.validate(); + } +} + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/shared/node_method.h b/cli/cpp_src/variant/ast/shared/node_method.h new file mode 100644 index 000000000..f4a34dcef --- /dev/null +++ b/cli/cpp_src/variant/ast/shared/node_method.h @@ -0,0 +1,31 @@ +#pragma once +#include +#include +#include +#include + +#include "variant/ast/node.h" +#include "variant/ast/shared/node_code.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +class MethodNode : public AstNode { + public: + MethodNode(const Tokenizer::Token token, const std::string &name, + const std::vector &langs) + : AstNode(token), name(name), langs(langs) {} + + const std::string name; + const std::vector langs; + + std::string toString() const; + std::string toPyString(bool with_usage) const; + + static std::shared_ptr Parser( + std::vector::const_iterator &it); + + void validate() const; +}; + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/shared/node_stringify.cc b/cli/cpp_src/variant/ast/shared/node_stringify.cc new file mode 100644 index 000000000..4e1717781 --- /dev/null +++ b/cli/cpp_src/variant/ast/shared/node_stringify.cc @@ -0,0 +1,56 @@ +#include "variant/ast/shared/node_stringify.h" + +#include "variant/ast/utils.h" + +namespace gloo::AST { + +std::shared_ptr StringifyNode::Parser( + std::vector::const_iterator &it) { + const auto &start_token = *it; + ensureTokenKind(*it++, Tokenizer::TokenKind::StringifyKeyword); + const auto &type_name = ParseName(it); + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + std::vector> properties; + while (it->kind == Tokenizer::TokenKind::Identifier) { + properties.push_back(StringifyPropertyNode::Parser(it)); + } + ensureTokenKind(*it++, Tokenizer::TokenKind::RightCurlyBracket); + return 
std::shared_ptr( + new StringifyNode(start_token, type_name, properties)); +} + +std::shared_ptr StringifyPropertyNode::Parser( + std::vector::const_iterator &it) { + const auto &start_token = *it; + const auto &name = ParseName(it); + std::optional rename; + std::optional describe; + bool skip = false; + + while (it->kind == Tokenizer::TokenKind::AtSymbol) { + ++it; + switch (it->kind) { + case Tokenizer::TokenKind::AliasKeyword: { + ensureTokenKind(*it++, Tokenizer::TokenKind::AliasKeyword); + rename = ParseString(it); + break; + } + case Tokenizer::TokenKind::DescriptionKeyword: { + ensureTokenKind(*it++, Tokenizer::TokenKind::DescriptionKeyword); + describe = ParseString(it); + break; + } + case Tokenizer::TokenKind::SkipKeyword: { + skip = true; + ++it; + break; + } + default: + throw SyntaxError(*it, "Unknown property"); + } + } + + return std::shared_ptr( + new StringifyPropertyNode(start_token, name, rename, describe, skip)); +} +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/shared/node_stringify.h b/cli/cpp_src/variant/ast/shared/node_stringify.h new file mode 100644 index 000000000..3c8179817 --- /dev/null +++ b/cli/cpp_src/variant/ast/shared/node_stringify.h @@ -0,0 +1,99 @@ +#pragma once +#include +#include +#include +#include + +#include "variant/ast/node.h" +#include "variant/error.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::Python { +std::string AsValue(const std::string &value); +} + +namespace gloo::AST { + +class StringifyPropertyNode : public AstNode { + public: + StringifyPropertyNode(const Tokenizer::Token &token, const std::string &name, + const std::optional &rename, + const std::optional &describe, bool skip) + : AstNode(token), + name(name), + rename(rename), + describe(describe), + skip(skip) {} + + const std::string name; + const std::optional rename; + const std::optional describe; + const bool skip; + const std::string pyString() const { + std::string res = name + "= StringifyRemappedField("; + if 
(skip) { + res += "skip=True,"; + } else { + if (rename.has_value()) { + res += "rename=" + Python::AsValue(rename.value()) + ","; + } + if (describe.has_value()) { + res += "describe=" + Python::AsValue(describe.value()) + ","; + } + } + res += ")"; + return res; + } + + std::string toString() const { + std::stringstream ss; + ss << " " << name; + if (skip) { + ss << " [skipped]"; + } else { + if (rename.has_value()) { + ss << " [aliased to] " << rename.value(); + } + if (describe.has_value()) { + ss << " [described as] " << describe.value(); + } + } + return ss.str(); + } + + static std::shared_ptr Parser( + std::vector::const_iterator &it); +}; + +class StringifyNode : public AstNode { + public: + StringifyNode( + const Tokenizer::Token token, const std::string &type_name, + const std::vector> &properties) + : AstNode(token), type_name(type_name), properties(properties) {} + + const std::string type_name; + const std::vector> properties; + + std::string toString() const { + std::stringstream ss; + ss << "Stringify " << type_name << " {" << std::endl; + for (const auto &prop : properties) { + ss << prop << std::endl; + } + ss << "}"; + return ss.str(); + } + + const std::string pyString() const { + std::string params = ""; + for (const auto &prop : properties) { + params += prop->pyString() + ","; + } + return params; + } + + static std::shared_ptr Parser( + std::vector::const_iterator &it); +}; +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/shared/node_type.cc b/cli/cpp_src/variant/ast/shared/node_type.cc new file mode 100644 index 000000000..1e2159300 --- /dev/null +++ b/cli/cpp_src/variant/ast/shared/node_type.cc @@ -0,0 +1,152 @@ +#include "variant/ast/shared/node_type.h" + +#include + +#include "variant/ast/types/node_class.h" +#include "variant/ast/types/node_enum.h" +#include "variant/ast/utils.h" +#include "variant/error.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +void TypeTypeRef::link(const std::vector> 
&classes, + const std::vector> &enums) { + for (auto &cn : classes) { + if (cn->name == name) { + this->class_node = cn.get(); + return; + } + } + for (auto &en : enums) { + if (en->name == name) { + this->enum_node = en.get(); + return; + } + } +} + +std::shared_ptr TypeNode::Parser( + std::vector::const_iterator &it) { + const Tokenizer::Token &start_token = *it; + // TODO: this should be ParseWord not ParseString. + const std::string typeString = ParseString(it); + return std::shared_ptr(new TypeNode(start_token, typeString)); +} + +void TypeNode::validate( + const std::unordered_set &class_names, + const std::unordered_set &enum_names) const { + if (!type) { + throw SyntaxError(token, "Type not found"); + } + type->validate(class_names, enum_names); +} + +std::shared_ptr TypeFromStringImpl(const Tokenizer::Token &token, + const std::string &str, int &pos); + +std::shared_ptr ParseListType(const Tokenizer::Token &token, + const std::string &str, int &pos) { + if (pos - 1 > 0 && str[pos] == ']' && str[pos - 1] == '[') { + pos -= 2; + auto type = TypeFromStringImpl(token, str, pos); + return std::shared_ptr(new TypeTypeList(token, type)); + } + return nullptr; +} + +std::shared_ptr ParseOptionalType(const Tokenizer::Token &token, + const std::string &str, int &pos) { + if (str[pos] == '?') { + pos--; + auto type = TypeFromStringImpl(token, str, pos); + return std::shared_ptr(new TypeTypeOptional(token, type)); + } + return nullptr; +} + +std::shared_ptr ParseUnionType(const Tokenizer::Token &token, + const std::string &str, int &pos) { + if (str[pos] != '|') return nullptr; + + std::vector> types; + + pos--; + while (pos >= 0) { + auto type = TypeFromStringImpl(token, str, pos); + if (!type) { + return nullptr; + } + + types.push_back(type); + + if (pos > 0 && str[pos] == '|') { + pos--; + } else { + break; + } + } + + if (types.empty()) return nullptr; + return std::shared_ptr(new TypeTypeUnion(token, types)); +} + +std::shared_ptr ParseBaseType(const 
Tokenizer::Token &token, + const std::string &str, int &pos) { + int start = pos; + while (pos >= 0 && (isalpha(str[pos]) || isdigit(str[pos]))) { + pos--; + } + + if (start == pos) return nullptr; + + std::string baseType = str.substr(pos + 1, start - pos); + + if (baseType == "int") { + return std::shared_ptr(new TypeTypePrimitive(token, INT)); + } else if (baseType == "float") { + return std::shared_ptr(new TypeTypePrimitive(token, FLOAT)); + } else if (baseType == "bool") { + return std::shared_ptr(new TypeTypePrimitive(token, BOOL)); + } else if (baseType == "char") { + return std::shared_ptr(new TypeTypePrimitive(token, CHAR)); + } else if (baseType == "string") { + return std::shared_ptr(new TypeTypePrimitive(token, STRING)); + } else if (baseType == "null") { + return std::shared_ptr(new TypeTypePrimitive(token, NONE)); + } else { + return std::shared_ptr(new TypeTypeRef(token, baseType)); + } +} + +std::shared_ptr TypeFromStringImpl(const Tokenizer::Token &token, + const std::string &str, int &pos) { + auto optionalType = ParseOptionalType(token, str, pos); + if (optionalType) { + return optionalType; + } + + auto listType = ParseListType(token, str, pos); + if (listType) { + return listType; + } + + auto unionType = ParseUnionType(token, str, pos); + if (unionType) { + return unionType; + } + + return ParseBaseType(token, str, pos); +} + +std::shared_ptr TypeFromString(const Tokenizer::Token &token, + const std::string &str) { + int pos = static_cast(str.length()) - 1; + auto type = TypeFromStringImpl(token, str, pos); + if (pos != -1) { + throw SyntaxError(token, "Invalid type: " + str); + } + return type; +} +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/shared/node_type.h b/cli/cpp_src/variant/ast/shared/node_type.h new file mode 100644 index 000000000..bd6acac07 --- /dev/null +++ b/cli/cpp_src/variant/ast/shared/node_type.h @@ -0,0 +1,302 @@ +#pragma once +#include +#include +#include +#include + +#include "variant/ast/node.h" 
+#include "variant/error.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +class ClassNode; +class EnumNode; +class TypeType { + public: + TypeType(const Tokenizer::Token &token) : token(token) {} + + const Tokenizer::Token token; + + virtual ~TypeType() {} + virtual std::string toPyString() const = 0; + virtual std::string toPyDescription() const = 0; + virtual std::string toString() const = 0; + virtual bool isCustomType() const { return false; } + virtual std::string defaultValue() const { return ""; } + virtual std::vector dependencies() const { return {}; } + virtual void validate(const std::unordered_set &, + const std::unordered_set &) const = 0; + virtual void link(const std::vector> &, + const std::vector> &) = 0; + + ClassNode *class_node = nullptr; + EnumNode *enum_node = nullptr; +}; + +enum PrimitiveType { + CHAR, + STRING, + INT, + FLOAT, + BOOL, + NONE, +}; + +class TypeTypeOptional final : public TypeType { + public: + TypeTypeOptional(const Tokenizer::Token &token, + std::shared_ptr type) + : TypeType(token), type(type) {} + void link(const std::vector> &classes, + const std::vector> &enums) { + type->link(classes, enums); + } + virtual void validate( + const std::unordered_set &class_names, + const std::unordered_set &enum_names) const { + type->validate(class_names, enum_names); + } + virtual std::string defaultValue() const { return " = None"; } + + virtual std::string toString() const { + return "Optional[" + type->toString() + "]"; + } + + std::string toPyDescription() const { + return "StringifyOptional(" + type->toPyDescription() + ")"; + } + + std::string toPyString() const { + return "typing.Optional[" + type->toPyString() + "]"; + } + + virtual std::vector dependencies() const { + return type->dependencies(); + } + + // shared_ptr to TypeType + const std::shared_ptr type; +}; + +class TypeTypePrimitive final : public TypeType { + public: + TypeTypePrimitive(const Tokenizer::Token &token, const PrimitiveType &type) + 
: TypeType(token), type(type) {} + void link(const std::vector> &, + const std::vector> &) {} + + virtual std::string toString() const { + switch (type) { + case PrimitiveType::CHAR: + return "char"; + case PrimitiveType::STRING: + return "string"; + case PrimitiveType::INT: + return "int"; + case PrimitiveType::FLOAT: + return "float"; + case PrimitiveType::BOOL: + return "bool"; + case PrimitiveType::NONE: + return "null"; + default: + throw SyntaxError(token, "Unknown primitive type"); + } + } + + std::string toPyDescription() const { + switch (type) { + case PrimitiveType::CHAR: + return "StringifyChar()"; + case PrimitiveType::STRING: + return "StringifyString()"; + case PrimitiveType::INT: + return "StringifyInt()"; + case PrimitiveType::FLOAT: + return "StringifyFloat()"; + case PrimitiveType::BOOL: + return "StringifyBool()"; + case PrimitiveType::NONE: + return "StringifyNone()"; + default: + throw SyntaxError(token, "Unknown primitive type"); + } + } + + std::string toPyString() const { + switch (type) { + case PrimitiveType::CHAR: + return "str"; + case PrimitiveType::STRING: + return "str"; + case PrimitiveType::INT: + return "int"; + case PrimitiveType::FLOAT: + return "float"; + case PrimitiveType::BOOL: + return "bool"; + case PrimitiveType::NONE: + return "None"; + default: + throw SyntaxError(token, "Unknown primitive type"); + } + } + + const PrimitiveType type; + + void validate(const std::unordered_set &, + const std::unordered_set &) const {} +}; + +class TypeTypeRef final : public TypeType { + public: + TypeTypeRef(const Tokenizer::Token &token, const std::string &name) + : TypeType(token), name(name) {} + void link(const std::vector> &classes, + const std::vector> &enums); + + virtual std::string toString() const { return name; } + + bool isCustomType() const { return true; } + std::string toPyString() const { return toString(); } + std::string toPyDescription() const { + return "Stringify" + toString() + "()"; + } + + virtual std::vector 
dependencies() const { return {name}; } + + const std::string name; + + virtual void validate( + const std::unordered_set &class_names, + const std::unordered_set &enum_names) const { + if (class_names.find(name) == class_names.end() && + enum_names.find(name) == enum_names.end()) { + throw SyntaxError(token, "Unknown type: " + name); + } + } +}; + +class TypeTypeList final : public TypeType { + public: + TypeTypeList(const Tokenizer::Token &token, std::shared_ptr type) + : TypeType(token), type(type) {} + void link(const std::vector> &classes, + const std::vector> &enums) { + type->link(classes, enums); + } + + virtual std::string toString() const { + return "List[" + type->toString() + "]"; + } + void validate(const std::unordered_set &class_names, + const std::unordered_set &enum_names) const { + type->validate(class_names, enum_names); + } + std::string toPyString() const { + return "typing.List[" + type->toPyString() + "]"; + } + std::string toPyDescription() const { + return "StringifyList(" + type->toPyDescription() + ")"; + } + virtual std::vector dependencies() const { + return type->dependencies(); + } + + // shared_ptr to TypeType + const std::shared_ptr type; +}; + +class TypeTypeUnion final : public TypeType { + public: + TypeTypeUnion(const Tokenizer::Token &token, + const std::vector> &types) + : TypeType(token), types(types) {} + void link(const std::vector> &classes, + const std::vector> &enums) { + for (auto &type : types) { + type->link(classes, enums); + } + } + + virtual std::string toString() const { + std::stringstream ss; + ss << "Union["; + for (auto &type : types) { + ss << type->toString() << ", "; + } + ss << "]"; + return ss.str(); + } + std::string toPyString() const { + std::stringstream ss; + ss << "typing.Union["; + for (auto &type : types) { + ss << type->toPyString() << ", "; + } + ss << "]"; + return ss.str(); + } + + virtual void validate( + const std::unordered_set &class_names, + const std::unordered_set &enum_names) const { + 
for (auto &type : types) { + type->validate(class_names, enum_names); + } + } + + std::string toPyDescription() const { + std::string desc = "StringifyUnion[" + toPyString() + "]("; + for (auto &type : types) { + desc += type->toPyDescription() + ", "; + } + desc += ")"; + return desc; + } + + virtual std::vector dependencies() const { + std::vector deps; + for (auto &type : types) { + auto type_deps = type->dependencies(); + deps.insert(deps.end(), type_deps.begin(), type_deps.end()); + } + return deps; + } + + // shared_ptr to TypeType + const std::vector> types; +}; + +std::shared_ptr TypeFromString(const Tokenizer::Token &token, + const std::string &str); + +class TypeNode : public AstNode { + public: + TypeNode(const Tokenizer::Token token, const std::string &type) + : AstNode(token), type(TypeFromString(token, type)) { + if (!this->type) { + throw SyntaxError(token, "Unexpected type: " + type); + } + } + void link(const std::vector> &classes, + const std::vector> &enums) { + type->link(classes, enums); + } + virtual std::string pythonType() const { return type->toPyString(); } + std::string pythonDescription() const { return type->toPyDescription(); } + + std::vector dependencies() const { return type->dependencies(); } + + const std::shared_ptr type; + + std::string toString() const { return "Type[" + type->toString() + "]"; } + + void validate(const std::unordered_set &class_names, + const std::unordered_set &enum_names) const; + + static std::shared_ptr Parser( + std::vector::const_iterator &it); +}; +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/types/node_class.cc b/cli/cpp_src/variant/ast/types/node_class.cc new file mode 100644 index 000000000..87e8cbc7b --- /dev/null +++ b/cli/cpp_src/variant/ast/types/node_class.cc @@ -0,0 +1,111 @@ +#include "variant/ast/types/node_class.h" + +#include +#include +#include +#include + +#include "variant/ast/utils.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +std::string 
PropertyNode::toString() const { + return "Property " + name + ": " + type->toString(); +} + +PropertyNode PropertyNode::Parser( + std::vector::const_iterator &it) { + const Tokenizer::Token &start_token = *it; + auto name = ParseName(it); + auto type = TypeNode::Parser(it); + return PropertyNode(start_token, name, type); +} + +std::string ClassNode::toString() const { + std::stringstream ss; + ss << "Class: " << name << std::endl; + for (auto &property : properties) { + ss << property << std::endl; + } + for (auto &method : methods) { + ss << method; + } + return ss.str(); +} + +std::shared_ptr ClassNode::Parser( + std::vector::const_iterator &it) { + const Tokenizer::Token &class_token = *it; + ensureTokenKind(*it++, Tokenizer::TokenKind::ClassKeyword); + const std::string name = ParseName(it); + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + + std::vector properties; + std::vector methods; + + while (it->kind == Tokenizer::TokenKind::AtSymbol || + it->kind == Tokenizer::TokenKind::Identifier) { + switch (it->kind) { + case Tokenizer::TokenKind::Identifier: + properties.push_back(PropertyNode::Parser(it)); + break; + case Tokenizer::TokenKind::AtSymbol: { + ++it; + // For now we only support methods. 
+ methods.push_back(*MethodNode::Parser(it)); + break; + } + default: + throw SyntaxError(*it, "Expected " + + Tokenizer::TokenKindToString( + Tokenizer::TokenKind::Identifier) + + " or " + + Tokenizer::TokenKindToString( + Tokenizer::TokenKind::MethodKeyword) + + ": Got[" + it->value + "]"); + } + } + + ensureTokenKind(*it++, Tokenizer::TokenKind::RightCurlyBracket); + + return std::shared_ptr( + new ClassNode(class_token, name, properties, methods)); +} + +void PropertyNode::validate( + const std::unordered_set &class_names, + const std::unordered_set &enum_names) const { + type->validate(class_names, enum_names); +} + +void ClassNode::validate( + const std::unordered_set &class_names, + const std::unordered_set &enum_names) const { + std::unordered_set names; + for (auto &property : properties) { + if (names.find(property.name) != names.end()) { + throw DuplicateError({token}, + "Duplicate property name: " + property.name); + } + names.insert(property.name); + property.validate(class_names, enum_names); + } + for (auto &method : methods) { + if (names.find(method.name) != names.end()) { + throw DuplicateError({token}, "Duplicate method name: " + method.name); + } + names.insert(method.name); + method.validate(); + } +} + +std::vector ClassNode::dependencies() const { + std::vector deps; + for (auto &property : properties) { + auto type_deps = property.type->dependencies(); + deps.insert(deps.end(), type_deps.begin(), type_deps.end()); + } + return deps; +} +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/types/node_class.h b/cli/cpp_src/variant/ast/types/node_class.h new file mode 100644 index 000000000..a9a2ba640 --- /dev/null +++ b/cli/cpp_src/variant/ast/types/node_class.h @@ -0,0 +1,72 @@ +#pragma once + +#include +#include +#include +#include + +#include "variant/ast/node.h" +#include "variant/ast/shared/node_code.h" +#include "variant/ast/shared/node_method.h" +#include "variant/ast/shared/node_type.h" +#include 
"variant/tokenizer/tokenizer.h" + +namespace gloo::AST { + +class PropertyNode : public AstNode { + public: + PropertyNode(const Tokenizer::Token &token, const std::string &name, + const std::shared_ptr &type) + : AstNode(token), name(name), type(type) {} + + const std::string name; + const std::shared_ptr type; + + std::string toString() const; + + static PropertyNode Parser(std::vector::const_iterator &it); + + void validate(const std::unordered_set &class_names, + const std::unordered_set &enum_names) const; + + void link(const std::vector> &classes, + const std::vector> &enums) { + type->link(classes, enums); + } + + std::string pythonDescription() const; +}; + +class ClassNode : public OutputNode { + public: + ClassNode(const Tokenizer::Token token, const std::string &name, + const std::vector &properties, + const std::vector &methods) + : OutputNode(token, name), properties(properties), methods(methods) {} + + PYTHONIC(); + + NodeOrder order() const { return NodeOrder::CLASS; } + + std::vector properties; + std::vector methods; + + std::string toString() const; + + void validate(const std::unordered_set &class_names, + const std::unordered_set &enum_names) const; + + std::vector dependencies() const; + + static std::shared_ptr Parser( + std::vector::const_iterator &it); + + void link(const std::vector> &classes, + const std::vector> &enums) { + for (auto &property : properties) { + property.link(classes, enums); + } + } +}; + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/types/node_enum.cc b/cli/cpp_src/variant/ast/types/node_enum.cc new file mode 100644 index 000000000..558d088ff --- /dev/null +++ b/cli/cpp_src/variant/ast/types/node_enum.cc @@ -0,0 +1,63 @@ +#include "variant/ast/types/node_enum.h" + +#include +#include +#include + +#include "variant/ast/utils.h" +#include "variant/common.h" + +namespace gloo::AST { +using namespace Tokenizer; + +std::string EnumNode::toString() const { + std::stringstream ss; + 
ss << "Enum: " << name << std::endl; + for (const auto &value : values) { + ss << " " << value << std::endl; + } + return ss.str(); +} + +std::shared_ptr EnumNode::Parser( + std::vector::const_iterator &it) { + /* Enums are of the form: + * enum { + * values { + * + * + * } + * } + */ + const Tokenizer::Token &start_token = *it; + ensureTokenKind(*it++, TokenKind::EnumKeyword); + const std::string name = ParseName(it); + ensureTokenKind(*it++, TokenKind::LeftCurlyBracket); + std::map> values; + while (it->kind == Tokenizer::TokenKind::Identifier) { + auto &token = *it; + values[ParseName(it)].push_back(token); + } + ensureTokenKind(*it++, TokenKind::RightCurlyBracket); + + // If there are duplicate values, throw an error. + for (const auto &[key, val] : values) { + if (val.size() > 1) { + throw DuplicateError(val, "Duplicate value in enum: " + key); + } + } + + std::vector values_str; + for (const auto &[key, val] : values) { + values_str.push_back(key); + } + + return std::shared_ptr(new EnumNode(start_token, name, values_str)); +} + +void EnumNode::validate() const { + if (values.size() == 0) { + throw SyntaxError(token, "Enum must have at least one value."); + } +} +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/types/node_enum.h b/cli/cpp_src/variant/ast/types/node_enum.h new file mode 100644 index 000000000..032d2152c --- /dev/null +++ b/cli/cpp_src/variant/ast/types/node_enum.h @@ -0,0 +1,27 @@ +#pragma once +#include +#include +#include + +#include "variant/ast/node.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { +class EnumNode : public OutputNode { + public: + EnumNode(const Tokenizer::Token token, const std::string &name, + const std::vector &values) + : OutputNode(token, name), values(values) {} + NodeOrder order() const { return NodeOrder::ENUM; } + + const std::vector values; + std::string toString() const; + PYTHONIC(); + + void validate() const; + + static std::shared_ptr Parser( + std::vector::const_iterator 
&it); +}; + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/types/node_llm_client.cc b/cli/cpp_src/variant/ast/types/node_llm_client.cc new file mode 100644 index 000000000..8f7dab4a2 --- /dev/null +++ b/cli/cpp_src/variant/ast/types/node_llm_client.cc @@ -0,0 +1,114 @@ +#include "variant/ast/types/node_llm_client.h" + +#include +#include + +#include "variant/ast/utils.h" +#include "variant/common.h" + +namespace gloo::AST { +using namespace Tokenizer; + +std::string LLMClientNode::toString() const { + std::stringstream ss; + ss << "LLMClient: " << name << ": " << provider << std::endl; + for (const auto &[key, val] : args) { + ss << " " << key << ": " << val << std::endl; + } + return ss.str(); +} + +std::shared_ptr LLMClientNode::Parser( + std::vector::const_iterator &it) { + const Tokenizer::Token &start_token = *it; + ensureTokenKind(*it++, TokenKind::ClientKeyword); + const std::string name = ParseName(it); + ensureTokenKind(*it++, TokenKind::LeftCurlyBracket); + ensureTokenKind(*it++, TokenKind::AtSymbol); + ensureTokenKind(*it++, TokenKind::ProviderKeyword); + const std::string provider = ParseString(it); + + int num_retries = 0; + std::optional default_fallback_client; + std::unordered_map fallback_clients; + + std::unordered_map args; + while (it->kind != TokenKind::RightCurlyBracket) { + switch (it->kind) { + case TokenKind::Identifier: { + const std::string key = ParseName(it); + const std::string value = ParseString(it); + args[key] = value; + break; + } + case TokenKind::AtSymbol: { + ++it; + switch (it->kind) { + case TokenKind::RetryKeyword: { + ++it; + num_retries = std::stoi(it->value); + ++it; + break; + } + case TokenKind::FallbackKeyword: { + if (it->value == "fallback") { + ++it; + default_fallback_client = ParseString(it); + } else { + // Find code from string: fallback[code] + int code = + std::stoi(it->value.substr(9, it->value.length() - 10)); + if (fallback_clients.find(code) != 
fallback_clients.end()) { + throw SyntaxError( + *it, "Duplicate fallback code: " + std::to_string(code)); + } + ++it; + fallback_clients[code] = ParseString(it); + } + break; + } + default: + throw SyntaxError(*it, "Unexpected token after @: " + it->value); + } + break; + } + default: + throw SyntaxError(*it, "Unexpected token in client[llm]: " + + Tokenizer::TokenKindToString(it->kind) + + ": " + it->value); + } + } + ensureTokenKind(*it++, TokenKind::RightCurlyBracket); + return std::shared_ptr( + new LLMClientNode(start_token, name, provider, args, num_retries, + default_fallback_client, fallback_clients)); +} + +void LLMClientNode::validate( + const std::unordered_set &llm_clients) const { + if (args.size() == 0) { + throw SyntaxError( + token, "Generally at least the model name is required for client[llm]"); + } + + if (default_fallback_client.has_value()) { + if (default_fallback_client.value() == name) { + throw SyntaxError(token, "Cannot fallback to self"); + } + if (llm_clients.find(default_fallback_client.value()) == + llm_clients.end()) { + throw SyntaxError(token, "Fallback client not found: " + + default_fallback_client.value()); + } + } + + for (const auto &[code, client] : fallback_clients) { + if (client == name) { + throw SyntaxError(token, "Cannot fallback to self"); + } + if (llm_clients.find(client) == llm_clients.end()) { + throw SyntaxError(token, "Fallback client not found: " + client); + } + } +} +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/types/node_llm_client.h b/cli/cpp_src/variant/ast/types/node_llm_client.h new file mode 100644 index 000000000..a60056177 --- /dev/null +++ b/cli/cpp_src/variant/ast/types/node_llm_client.h @@ -0,0 +1,56 @@ +#pragma once +#include +#include +#include +#include +#include + +#include "variant/ast/node.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { +class LLMClientNode : public OutputNode { + public: + LLMClientNode(const Tokenizer::Token token, const std::string 
&name, + const std::string &provider, + const std::unordered_map &args, + const int num_retries, + const std::optional &default_fallback_client, + const std::unordered_map &fallback_clients) + : OutputNode(token, name), + provider(provider), + args(args), + num_retries(num_retries), + default_fallback_client(default_fallback_client), + fallback_clients(fallback_clients) {} + NodeOrder order() const { return NodeOrder::LLM_CLIENT; } + + std::vector dependencies() const { + std::unordered_set deps; + for (const auto &[key, val] : fallback_clients) { + deps.insert(val); + } + if (default_fallback_client.has_value()) { + deps.insert(default_fallback_client.value()); + } + + return std::vector(deps.begin(), deps.end()); + } + + const std::string provider; + const std::unordered_map args; + + const int num_retries; + const std::optional default_fallback_client; + const std::unordered_map fallback_clients; + + std::string toString() const; + PYTHONIC(); + + void validate(const std::unordered_set &llm_clients) const; + + static std::shared_ptr Parser( + std::vector::const_iterator &it); +}; + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/ast/utils.cc b/cli/cpp_src/variant/ast/utils.cc new file mode 100644 index 000000000..42bbfec99 --- /dev/null +++ b/cli/cpp_src/variant/ast/utils.cc @@ -0,0 +1,112 @@ +#include "variant/ast/utils.h" + +#include +#include + +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { +std::string TokensToString(const std::vector &tokens) { + if (tokens.size() == 0) { + return ""; + } + + std::string result = ""; + int lastLine = tokens[0].line; + const int dedentCount = tokens[0].column; + int lastColumn = dedentCount; + for (const Tokenizer::Token &token : tokens) { + if (token.line > lastLine) { + result += std::string(token.line - lastLine, '\n'); + lastColumn = dedentCount; + lastLine = token.line; + } + + if (token.column < lastColumn) { + throw SyntaxError(token, + "Strings must be 
indented to match the first line."); + } + + if (token.column > lastColumn) { + result += std::string(token.column - lastColumn, ' '); + } + result += token.value; + lastColumn = token.column + static_cast(token.value.length()); + } + return result; +} + +std::string ParseMultiLineString( + std::vector::const_iterator &it) { + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + int bracketCount = 1; + std::vector tokens; + while (bracketCount > 0) { + if (it->kind == Tokenizer::TokenKind::Eof) { + throw SyntaxError(*it, "Missing closing '}'"); + } + if (it->kind == Tokenizer::TokenKind::LeftCurlyBracket) { + bracketCount++; + } else if (it->kind == Tokenizer::TokenKind::RightCurlyBracket) { + bracketCount--; + } + + if (bracketCount > 0) { + tokens.push_back(*it); + } + + // Check it is not the last token + it++; + } + + return TokensToString(tokens); +} + +std::string ParseSingleLineString( + int line, std::vector::const_iterator &it) { + std::vector tokens; + while (it->line == line) { + tokens.push_back(*it++); + } + return TokensToString(tokens); +} + +std::string ParseString(std::vector::const_iterator &it) { + if (it->kind == Tokenizer::TokenKind::LeftCurlyBracket) { + return ParseMultiLineString(it); + } else { + return ParseSingleLineString(it->line, it); + } +} + +std::vector ParseIdentifierList( + std::vector::const_iterator &it) { + std::vector result; + if (it->kind == Tokenizer::TokenKind::LeftCurlyBracket) { + // TODO: Support parsing single line lists + ensureTokenKind(*it++, Tokenizer::TokenKind::LeftCurlyBracket); + while (it->kind != Tokenizer::TokenKind::RightCurlyBracket) { + ensureTokenKind(*it, Tokenizer::TokenKind::Identifier); + result.push_back(it->value); + it++; + } + ensureTokenKind(*it++, Tokenizer::TokenKind::RightCurlyBracket); + } else { + const auto startLine = it->line; + while (it->line == startLine) { + ensureTokenKind(*it, Tokenizer::TokenKind::Identifier); + result.push_back(it->value); + it++; + } + } + return 
result; +} + +std::string ParseName(std::vector::const_iterator &it) { + ensureTokenKind(*it, Tokenizer::TokenKind::Identifier); + std::string name = it->value; + it++; + return name; +} + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/ast/utils.h b/cli/cpp_src/variant/ast/utils.h new file mode 100644 index 000000000..8de02314e --- /dev/null +++ b/cli/cpp_src/variant/ast/utils.h @@ -0,0 +1,25 @@ +#pragma once +#include +#include + +#include "variant/error.h" +#include "variant/tokenizer/tokenizer.h" + +namespace gloo::AST { +// Throw a SyntaxError when the token is not of the expected kind. +inline void ensureTokenKind(const Tokenizer::Token &token, + Tokenizer::TokenKind expectedKind) { + if (token.kind != expectedKind) { + throw SyntaxError(token, "Expected " + + Tokenizer::TokenKindToString(expectedKind) + + " Got: " + token.value + " " + + Tokenizer::TokenKindToString(token.kind) + ""); + } +} + +std::string ParseString(std::vector::const_iterator &it); +std::vector ParseIdentifierList( + std::vector::const_iterator &it); +std::string ParseName(std::vector::const_iterator &it); + +} // namespace gloo::AST \ No newline at end of file diff --git a/cli/cpp_src/variant/common.h b/cli/cpp_src/variant/common.h new file mode 100644 index 000000000..1125cb655 --- /dev/null +++ b/cli/cpp_src/variant/common.h @@ -0,0 +1,37 @@ +#pragma once +#include + +#include "variant/error.h" + +namespace gloo { + +inline void validateTestCaseIdentifier(const Tokenizer::Token &token, + const std::string &name) { + // Check string is only alphanumeric and underscores. + // Unlike validateIdentifier, the first character is not restricted here (a test case name may start with a digit or underscore). + if (name.length() == 0) { + throw SyntaxError(token, "Invalid identifier: " + name); + } else { + for (char c : name) { + if (!std::isalnum(c) && c != '_') { + throw SyntaxError(token, "Invalid identifier: " + name); + } + } + } +} + +inline void validateIdentifier(const Tokenizer::Token &token, + const std::string &name) { + // Check string is only alphanumeric and underscores. 
+ // First character must be a letter. + if (name.length() == 0 || !std::isalpha(name[0])) { + throw SyntaxError(token, "Invalid identifier: " + name); + } else if (name.length() > 1) { + for (char c : name) { + if (!std::isalnum(c) && c != '_') { + throw SyntaxError(token, "Invalid identifier: " + name); + } + } + } +} +} // namespace gloo \ No newline at end of file diff --git a/cli/cpp_src/variant/error.h b/cli/cpp_src/variant/error.h new file mode 100644 index 000000000..007be4713 --- /dev/null +++ b/cli/cpp_src/variant/error.h @@ -0,0 +1,76 @@ +#pragma once +#include +#include + +#include "variant/tokenizer/tokenizer.h" + +namespace gloo { +class GlooError { + public: + GlooError(const std::string &msg) : msg(msg) {} + ~GlooError() {} + + std::string msg; + + virtual std::string what() const { return msg; } +}; + +class UndefinedError : public GlooError { + public: + UndefinedError(const Tokenizer::Token &token, const std::string &msg) + : GlooError(msg), token(token) {} + + const Tokenizer::Token token; + + virtual std::string what() const { + return token.file + ":" + std::to_string(token.line) + ":" + + std::to_string(token.column) + ": Undefined Error: " + msg; + } +}; + +class SyntaxError : public GlooError { + public: + SyntaxError(const Tokenizer::Token &token, const std::string &msg) + : GlooError(msg), token(token) {} + + const Tokenizer::Token token; + + virtual std::string what() const { + return token.file + ":" + std::to_string(token.line) + ":" + + std::to_string(token.column) + ": Syntax Error: " + msg; + } +}; + +class DuplicateError : public GlooError { + public: + DuplicateError(const std::vector &tokens, + const std::string &msg) + : GlooError(msg), tokens(tokens) {} + + const std::vector tokens; + + virtual std::string what() const { + std::string message = "Duplicate Error: " + msg + "\n"; + for (const auto &token : tokens) { + message += "\tFound at: " + token.file + ":" + + std::to_string(token.line) + ":" + + 
std::to_string(token.column) + "\n"; + } + return message; + } +}; + +class CircularDependencyError : public GlooError { + public: + CircularDependencyError(const Tokenizer::Token &token, const std::string &msg) + : GlooError(msg), token(token) {} + + const Tokenizer::Token token; + + virtual std::string what() const { + return token.file + ":" + std::to_string(token.line) + ":" + + std::to_string(token.column) + ": Circular dependency found\n\t" + + msg; + } +}; +} // namespace gloo \ No newline at end of file diff --git a/cli/cpp_src/variant/generate/dir_writer.cc b/cli/cpp_src/variant/generate/dir_writer.cc new file mode 100644 index 000000000..299d7408a --- /dev/null +++ b/cli/cpp_src/variant/generate/dir_writer.cc @@ -0,0 +1,85 @@ +#include "variant/generate/dir_writer.h" + +#include +#include +#include +#include +#include +#include +#include + +namespace gloo { + +const std::string PY_PREAMBLE = R"( +# This file is autogenerated by the gloo compiler +# Do not edit this file directly +# (skip unused imports) +# ruff: noqa: F401 +# flake8: noqa +# pylint: skip-file +# isort: skip_file +)"; + +std::unique_ptr FileWriter::stream() { + return std::unique_ptr(new BufferStream(this)); +} + +void PyFileWriter::flush(const std::filesystem::path &_path) { + // Sort imports + std::sort(imports.begin(), imports.end()); + + std::filesystem::path path = + std::filesystem::weakly_canonical(_path).make_preferred(); + // Ensure directory exists + std::filesystem::create_directories(path.parent_path()); + + std::ofstream file(path, std::ios::out); + file << PY_PREAMBLE; + // Write imports first. 
+ for (const auto &[import_path, import_name, export_v] : imports) { + file << "from " << import_path << " import " << import_name << std::endl; + } + file << std::endl; + + // Replace the template with the correct values using a regex + for (const auto &[key, value] : template_vars) { + size_t pos = 0; + std::string to_replace = "{@@" + key + "}"; + while ((pos = content.find(to_replace, pos)) != std::string::npos) { + content.replace(pos, to_replace.length(), value); + pos += value.length(); + } + } + + // Check if there are any template variables left + std::smatch match; + std::regex pattern("\\{@@[a-zA-Z0-9_]+\\}"); + std::string::const_iterator searchStart(content.cbegin()); + while (std::regex_search(searchStart, content.cend(), match, pattern)) { + std::cerr << path << ": Template variable not replaced: " << match[0] + << std::endl; + searchStart = match.suffix().first; + } + if (std::regex_search(content, pattern)) { + throw std::runtime_error(path.string() + + ": Template variable not replaced"); + } + + file << content; + + // Add __all__ if there are any exports + if (std::any_of(imports.begin(), imports.end(), + [](const auto &import) { return std::get<2>(import); })) { + file << std::endl; + file << "__all__ = [" << std::endl; + for (const auto &[import_path, import_name, export_v] : imports) { + if (export_v) { + file << " '" << import_name << "'," << std::endl; + } + } + file << "]" << std::endl; + } + std::cout << "Wrote to " << path << std::endl; +} + +} // namespace gloo \ No newline at end of file diff --git a/cli/cpp_src/variant/generate/dir_writer.h b/cli/cpp_src/variant/generate/dir_writer.h new file mode 100644 index 000000000..6de778c3f --- /dev/null +++ b/cli/cpp_src/variant/generate/dir_writer.h @@ -0,0 +1,102 @@ +#pragma once +#include +#include +#include +#include +#include +#include +#include + +namespace gloo { +class FileWriter { + public: + FileWriter() {} + + virtual void flush(const std::filesystem::path& path) = 0; + 
std::unique_ptr stream(); + + void add_import(const std::string& import_path, + const std::string& import_name, bool export_imports = false) { + // Check if the import is already added + for (const auto& [path, name, _] : imports) { + if (path == import_path && name == import_name) { + return; + } + } + imports.push_back({import_path, import_name, export_imports}); + } + + void add_template_var(const std::string& key, const std::string& value) { + template_vars[key] = value; + } + + private: + // Create a stream which after closing will be added to the map of values. + class BufferStream : public std::stringstream { + public: + BufferStream(FileWriter* writer) : writer_(writer) {} + + ~BufferStream() { flush(); } + + void flush() { writer_->content += str(); } + + private: + FileWriter* writer_; + }; + + protected: + std::unordered_map template_vars; + std::string content; + std::vector> imports; +}; + +class PyFileWriter final : public FileWriter { + public: + void flush(const std::filesystem::path& path) override; +}; + +class DirectoryWriter { + public: + static DirectoryWriter& get() { + static DirectoryWriter instance; + return instance; + } + + std::shared_ptr file(const char* const path) { + return file(std::filesystem::path(path)); + } + std::shared_ptr file(const std::filesystem::path& path) { + std::string path_str = path.string(); + if (file_map.find(path_str) == file_map.end()) { + file_map[path_str] = std::shared_ptr(new PyFileWriter()); + } + return file_map[path_str]; + } + + void flush(std::filesystem::path root_path) { + const auto temp_path = root_path.parent_path() / std::string(".gloo.temp"); + // Ensure the path is a directory if it exists. + std::filesystem::create_directories(temp_path); + for (const auto& [path, writer] : file_map) { + writer->flush(temp_path / path); + } + // Write a special py.typed file to indicate this is a python package. 
+ std::ofstream typed_file(temp_path / std::string("py.typed")); + typed_file.close(); + + // If the root path exists, delete it. + if (std::filesystem::exists(root_path)) { + std::filesystem::remove_all(root_path); + } + std::filesystem::rename(temp_path, root_path); + } + + private: + DirectoryWriter() {} + DirectoryWriter(const DirectoryWriter&) = delete; + void operator=(const DirectoryWriter&) = delete; + + std::unordered_map> file_map; +}; + +} // namespace gloo \ No newline at end of file diff --git a/cli/cpp_src/variant/generate/generate.h b/cli/cpp_src/variant/generate/generate.h new file mode 100644 index 000000000..13ab99233 --- /dev/null +++ b/cli/cpp_src/variant/generate/generate.h @@ -0,0 +1,19 @@ +#pragma once + +#include +#include +#include + +namespace gloo::Generate { + +#define PYTHONIC() void toPython(const std::vector& deps) const +#define IMPL_PYTHONIC(cls) \ + void cls::toPython(const std::vector& deps) const + +class PythonImpl { + public: + virtual ~PythonImpl() = default; + virtual PYTHONIC() = 0; +}; + +} // namespace gloo::Generate \ No newline at end of file diff --git a/cli/cpp_src/variant/generate/python/functions/function_test_group.cc b/cli/cpp_src/variant/generate/python/functions/function_test_group.cc new file mode 100644 index 000000000..2e1267ac7 --- /dev/null +++ b/cli/cpp_src/variant/generate/python/functions/function_test_group.cc @@ -0,0 +1,100 @@ +#include +#include + +#include "variant/ast/functions/node_function.h" +#include "variant/ast/functions/tests/test.h" +#include "variant/generate/dir_writer.h" +#include "variant/generate/python/utils.h" + +namespace gloo::AST { + +const std::string test_group_template = R"( +import typing +import pytest + +InputType = typing.TypeVar('InputType', bound={@@input_type}) +OutputType = typing.TypeVar('OutputType', bound={@@output_type}) + +@pytest.mark.gloo_test +@pytest.mark.asyncio +@pytest.mark.parametrize("variant", [{@@variants}]) +class Test{@@group_name}: +)"; + +const 
std::string test_case_template = R"( + async def test_{@@case_name__num__}(self, variant: VariantTypes) -> None: + arg = {@@arg__num__} + {@@setter__num__}await {@@func_name}(variant, arg) + {@@group_eval_methods} + {@@case_eval_methods__num__} +)"; + +IMPL_PYTHONIC(TestGroupNode) { + // Ensure the init file exists + DirectoryWriter::get().file(std::filesystem::path("functions") / + (functionName + "_") / std::string("tests") / + std::string("__init__.py")); + + auto file = DirectoryWriter::get().file( + std::filesystem::path("functions") / (functionName + "_") / + std::string("tests") / std::string("test_" + name + ".py")); + + std::string variants_str = ""; + for (const auto &variant : function->variants) { + variants_str += "'" + variant->name + "', "; + } + if (variants_str.size() > 0) + variants_str = variants_str.substr(0, variants_str.size() - 2); + file->add_template_var("variants", variants_str); + + std::string group_eval_methods = ""; + for (const auto &method : methods) { + group_eval_methods += method->toPyString(true); + } + // Indent group_eval_methods + file->add_template_var("group_eval_methods", + Python::indent(group_eval_methods, 2)); + + file->add_import("..", functionName); + file->add_import("..", "VariantTypes"); + file->add_template_var("group_name", name); + file->add_template_var("func_name", function->name); + file->add_template_var("input_type", function->input->pythonType()); + file->add_template_var("output_type", function->output->pythonType()); + + for (const auto &dep : deps) { + if (dep != functionName) { + file->add_import("....custom_types", dep); + } + } + + auto stream = file->stream(); + (*stream) << test_group_template; + + int counter = 0; + for (auto &c : cases) { + std::string case_eval_methods = ""; + for (const auto &method : c->methods) { + case_eval_methods += method->toPyString(true); + } + std::string counter_str = std::to_string(counter++); + file->add_template_var("case_name" + counter_str, c->name); + 
file->add_template_var("case_eval_methods" + counter_str, + Python::indent(case_eval_methods, 2)); + file->add_template_var("arg" + counter_str, + Python::AsValue(function->input->type, c->value)); + file->add_template_var( + "setter" + counter_str, + (group_eval_methods.empty() && case_eval_methods.empty()) + ? "" + : "output = "); + + std::string case_string = test_case_template; + // Replace every instance of __num__ with counter_str + case_string = + std::regex_replace(case_string, std::regex("__num__"), counter_str); + (*stream) << case_string; + } +} + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/generate/python/functions/function_variant.cc b/cli/cpp_src/variant/generate/python/functions/function_variant.cc new file mode 100644 index 000000000..36c34b4ec --- /dev/null +++ b/cli/cpp_src/variant/generate/python/functions/function_variant.cc @@ -0,0 +1,265 @@ +#include "variant/ast/functions/node_function.h" +#include "variant/ast/functions/variants/code_variant.h" +#include "variant/ast/functions/variants/llm_variant.h" +#include "variant/ast/types/node_class.h" +#include "variant/generate/dir_writer.h" +#include "variant/generate/python/utils.h" + +namespace gloo::AST { + +const std::string llm_variant_template = R"( +import typing +import json +from gloo_py import LLMVariant +from gloo_py.stringify import ( + StringifyBase, + StringifyError, + StringifyNone, + StringifyBool, + StringifyInt, + StringifyChar, + StringifyString, + StringifyFloat, + StringifyEnum, + StringifyUnion, + StringifyOptional, + StringifyList, + StringifyClass, + FieldDescription, + EnumFieldDescription, + StringifyRemappedField, + StringifyCtx +) + +prompt = """\ +{@@prompt}""" + +stringifiers: typing.List[typing.Any] = [] +def gen_stringify() -> StringifyBase[{@@output_type}]: + with StringifyCtx(): + {@@stringify_init} + return OUTPUT_STRINGIFY + +OUTPUT_STRINGIFY = gen_stringify() + +{@@methods} + +{@@parser_middleware} +{@@custom_vars_middleware} + +async def 
parser(raw_llm_output: str) -> {@@output_type}: + return OUTPUT_STRINGIFY.parse(parser_middleware(raw_llm_output)) + +async def prompt_vars(arg: {@@input_type}) -> typing.Dict[str, str]: + vars = { + 'input': str(arg), + {@@input_props} + 'output.json': OUTPUT_STRINGIFY.json, + } + vars.update(custom_vars()) + for stringify in stringifiers: + vars.update(**stringify.vars()) + vars.update(**OUTPUT_STRINGIFY.vars()) + return vars + +Variant{@@name} = LLMVariant[{@@input_type}, {@@output_type}]( + '{@@func_name}', '{@@name}', prompt=prompt, client={@@client}, parser=parser, prompt_vars=prompt_vars) + +async def RunVariant_{@@name}(arg: {@@input_type}) -> {@@output_type}: + return await Variant{@@name}.run(arg) +)"; + +IMPL_PYTHONIC(LLMVariantNode) { + auto file = DirectoryWriter::get().file( + std::filesystem::path("functions") / (functionName + "_") / + std::string("variants") / std::string("llm_" + name + ".py")); + + for (const auto &dep : deps) { + file->add_import("....custom_types", dep); + file->add_import("....custom_types.stringify", "Stringify" + dep); + } + file->add_import("....clients", client_name); + file->add_template_var("client", client_name); + + std::unordered_map> + stringify_vars; + for (const auto &v : stringify) { + stringify_vars[v->type_name] = v; + } + + std::string stringify_init_str = ""; + for (const auto &dep : deps) { + auto res = stringify_vars.find(dep); + stringify_init_str += "stringify_" + dep + " = Stringify" + dep + "("; + if (res != stringify_vars.end()) { + // We have a custom stringify for this type + stringify_init_str += res->second->pyString(); + } + stringify_init_str += ")\n"; + stringify_init_str += "stringifiers.append(stringify_" + dep + ")\n"; + } + if (function->output->type->isCustomType()) { + stringify_init_str += "OUTPUT_STRINGIFY = stringify_" + + function->output->pythonType() + "\n "; + } else { + stringify_init_str += + "OUTPUT_STRINGIFY = " + function->output->type->toPyDescription() + + "\n"; + 
stringify_init_str += "stringifiers.append(OUTPUT_STRINGIFY)\n"; + } + + std::string parser_middleware = + "def parser_middleware(raw_llm_output: str) -> str:\n return " + "raw_llm_output\n"; + std::string custom_vars_middleware = + "def custom_vars() -> typing.Dict[str, str]:\n return " + "{}\n"; + std::string methods_str = ""; + for (const auto &method : methods) { + methods_str += method->toPyString(false) + "\n"; + if (method->name == "parser_middleware") { + parser_middleware = ""; + } + if (method->name == "custom_vars") { + custom_vars_middleware = ""; + } + } + + std::vector> input_types; + const auto input_type_ptr = function->input->type->class_node; + if (input_type_ptr) { + input_types.push_back({"", input_type_ptr}); + } + + std::string input_props = ""; + auto add_input_prop = [&](const std::string &prefix, ClassNode *node) { + for (const auto &prop : node->properties) { + std::string suffix = prefix + prop.name; + input_props += "\n'input." + suffix + "': str(arg." + suffix + "),"; + if (prop.type->type->class_node) { + input_types.push_back({suffix + ".", prop.type->type->class_node}); + } + } + for (const auto &prop : node->methods) { + input_props += "\n'input." + prefix + prop.name + "': str(arg." 
+ prefix + + prop.name + "),"; + } + }; + + while (!input_types.empty()) { + auto [prefix, node] = input_types.back(); + input_types.pop_back(); + add_input_prop(prefix, node); + } + + // Remove starting whitespace + if (!input_props.empty()) { + input_props = input_props.substr(1); + } + file->add_template_var("name", name); + file->add_template_var("func_name", function->name); + file->add_template_var("input_type", function->input->pythonType()); + file->add_template_var("output_type", function->output->pythonType()); + file->add_template_var("input_props", Python::indent(input_props, 2)); + file->add_template_var("prompt", prompt); + file->add_template_var("custom_vars_middleware", custom_vars_middleware); + file->add_template_var("stringify_init", + Python::indent(stringify_init_str, 2)); + file->add_template_var("parser_middleware", parser_middleware); + file->add_template_var("methods", methods_str); + + (*file->stream()) << llm_variant_template; +} + +const std::string code_variant_impl_template = R"( +import typing + +InputType = {@@input_type} +OutputType = {@@output_type} + +async def {@@name}_impl(arg: InputType) -> OutputType: + # Write your code here + raise NotImplementedError('Code Variants must be custom implemented: {@@unique_name}') +)"; + +const std::string code_variant_template = R"( +import typing +from gloo_py import CodeVariant + +InputType = {@@input_type} +OutputType = {@@output_type} + +{@@method_str} + +Variant{@@name} = CodeVariant[InputType, OutputType]('{@@func_name}', '{@@name}', func={@@name}_impl) + +async def RunVariant_{@@name}(arg: InputType) -> OutputType: + return await Variant{@@name}.run(arg) +)"; + +IMPL_PYTHONIC(CodeVariantNode) { + auto file = DirectoryWriter::get().file( + std::filesystem::path("functions") / (functionName + "_") / + std::string("variants") / std::string("code_" + name + ".py")); + + std::unordered_set func_dependencies; + for (const auto &dep : usedFunction) { + func_dependencies.insert(dep); + 
file->add_import("..." + dep + "_", dep); + } + + for (const auto &dep : deps) { + if (func_dependencies.find(dep) != func_dependencies.end()) { + continue; + } + file->add_import("....custom_types", dep); + } + + std::string method_str = ""; + for (const auto &method : methods) { + method_str += method->toPyString(false) + "\n"; + } + + auto impl_method = + std::find_if(methods.begin(), methods.end(), + [](const auto &method) { return method->name == "impl"; }); + if (impl_method != methods.end()) { + method_str += name + "_impl = impl"; + } else { + file->add_import(".code_" + name + "_impl", name + "_impl"); + } + + file->add_template_var("name", name); + file->add_template_var("func_name", function->name); + file->add_template_var("input_type", function->input->pythonType()); + file->add_template_var("output_type", function->output->pythonType()); + file->add_template_var("method_str", method_str); + file->add_template_var("unique_name", uniqueName()); + + (*file->stream()) << code_variant_template; + + if (impl_method == methods.end()) { + auto impl_file = DirectoryWriter::get().file( + std::filesystem::path("functions") / (functionName + "_") / + std::string("variants") / std::string("code_" + name + "_impl.py")); + impl_file->add_template_var("name", name); + impl_file->add_template_var("unique_name", uniqueName()); + impl_file->add_template_var("input_type", function->input->pythonType()); + impl_file->add_template_var("output_type", function->output->pythonType()); + + for (const auto &dep : usedFunction) { + impl_file->add_import("..." 
+ dep, dep); + } + + for (const auto &dep : deps) { + if (func_dependencies.find(dep) != func_dependencies.end()) { + continue; + } + impl_file->add_import("....custom_types", dep); + } + + (*impl_file->stream()) << code_variant_impl_template; + } +} + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/generate/python/functions/node_function.cc b/cli/cpp_src/variant/generate/python/functions/node_function.cc new file mode 100644 index 000000000..34d21f1a4 --- /dev/null +++ b/cli/cpp_src/variant/generate/python/functions/node_function.cc @@ -0,0 +1,83 @@ + +#include "variant/ast/functions/node_function.h" + +#include + +#include "variant/generate/dir_writer.h" + +namespace gloo::AST { + +std::string toNameList( + const std::vector> &variants) { + if (variants.empty()) { + return "typing.Never"; + } + std::string res = "typing.Literal["; + for (const auto &variant : variants) { + res += "'" + variant->name + "', "; + } + res = res.substr(0, res.size() - 2); + return res + "]"; +} + +const std::string function_variant_template = R"( + if variant == '{variant_name}': + return await RunVariant_{variant_name}(args) +)"; + +const std::string function_template = R"( +import typing + +VariantTypes = {@@variant_types} + +async def {@@name}(variant: VariantTypes, args: {@@input_type}) -> {@@output_type}: +{@@variant_impls} + raise NotImplementedError(f'Variant not found: {@@name}::{variant}') + +__all__ = ["{@@name}", "VariantTypes"] +)"; + +IMPL_PYTHONIC(FunctionNode) { + (void)deps; // UNUSED + auto file = + DirectoryWriter::get().file(std::filesystem::path("functions") / + (name + "_") / std::string("__init__.py")); + + for (const auto &dep : dependencies()) { + file->add_import("...custom_types", dep); + } + std::string variant_imports_str = ""; + for (const auto &variant : variants) { + file->add_import(".variants", "RunVariant_" + variant->name); + } + const std::string variant_types_str = toNameList(variants); + + std::string variant_impls_str = ""; + for 
(const auto &variant : variants) { + variant_impls_str += + std::regex_replace(function_variant_template, + std::regex("\\{variant_name\\}"), variant->name); + } + + file->add_template_var("name", name); + file->add_template_var("input_type", input->pythonType()); + file->add_template_var("output_type", output->pythonType()); + file->add_template_var("variant_imports", variant_imports_str); + file->add_template_var("variant_types", variant_types_str); + file->add_template_var("variant_impls", variant_impls_str); + (*file->stream()) << function_template; + DirectoryWriter::get() + .file(std::filesystem::path("functions") / std::string("__init__.py")) + ->add_import("." + name + "_", name, /*export=*/true); + + // Write all variant imports in functions/{func}/variants/__init__.py + auto variants_file = DirectoryWriter::get().file( + std::filesystem::path("functions") / (name + "_") / + std::string("variants/__init__.py")); + for (const auto &variant : variants) { + variants_file->add_import("." 
+ variant->type() + "_" + variant->name, + "RunVariant_" + variant->name, /*export=*/true); + } +} + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/generate/python/shared/node_method.cc b/cli/cpp_src/variant/generate/python/shared/node_method.cc new file mode 100644 index 000000000..53f7f737b --- /dev/null +++ b/cli/cpp_src/variant/generate/python/shared/node_method.cc @@ -0,0 +1,29 @@ +#include "variant/ast/shared/node_method.h" + +#include "variant/error.h" + +namespace gloo::AST { + +std::string MethodNode::toPyString(bool with_usage) const { + // Find python lang + auto it = std::find_if(langs.begin(), langs.end(), [](const auto &lang) { + return lang.language == Language::PYTHON; + }); + if (it == langs.end()) { + throw SyntaxError(token, "No python implementation for method " + name); + } + + std::string result = it->code + "\n"; + + if (with_usage) { + const bool is_async = it->code.starts_with("async"); + if (is_async) { + result += "await "; + } + result += name + "(arg, output)\n"; + } + + return result; +} + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/generate/python/types/node_class.cc b/cli/cpp_src/variant/generate/python/types/node_class.cc new file mode 100644 index 000000000..ce02e86ab --- /dev/null +++ b/cli/cpp_src/variant/generate/python/types/node_class.cc @@ -0,0 +1,90 @@ +#include "variant/ast/types/node_class.h" + +#include +#include + +#include "variant/generate/dir_writer.h" +#include "variant/generate/python/utils.h" + +namespace gloo::AST { + +const std::string class_template = R"( +import typing +from pydantic import BaseModel +from gloo_py.stringify import ( + StringifyBase, + StringifyError, + StringifyNone, + StringifyBool, + StringifyInt, + StringifyChar, + StringifyString, + StringifyFloat, + StringifyEnum, + StringifyUnion, + StringifyOptional, + StringifyList, + StringifyClass, + FieldDescription, + EnumFieldDescription, + StringifyRemappedField, + StringifyCtx +) + +class {@@name}(BaseModel): + 
{@@properties} + +class Stringify{@@name}(StringifyClass[{@@name}]): + def __init__(self, **update_kwargs: StringifyRemappedField) -> None: + values: typing.Dict[str, FieldDescription[typing.Any]] = {{@@property_descriptions}} + super().__init__(model={@@name}, values=values, updates=update_kwargs) +)"; + +const std::string property_template = + R"("{name}": FieldDescription(name="{name}", description=None, type_desc={type_desc}),)"; + +std::string PropertyNode::pythonDescription() const { + std::unordered_map replacements = { + {"name", name}, + {"type_desc", type->pythonDescription()}, + }; + std::string output = property_template; + for (const auto &[key, value] : replacements) { + output = std::regex_replace(output, std::regex("\\{" + key + "\\}"), value); + } + return output; +} + +void ClassNode::toPython(const std::vector &deps) const { + auto file = DirectoryWriter::get().file("custom_types/" + name + "_.py"); + + for (const auto &dep : deps) { + file->add_import("." + dep + "_", dep); + file->add_import("." + dep + "_", "Stringify" + dep); + } + std::string properties_str = ""; + std::string property_descriptions = ""; + for (const auto &field : this->properties) { + properties_str += field.name + ": " + field.type->pythonType() + + field.type->type->defaultValue() + "\n"; + property_descriptions += field.pythonDescription(); + } + for (const auto &method : this->methods) { + properties_str += method.toPyString(false) + "\n"; + } + + file->add_template_var("name", name); + file->add_template_var("properties", Python::indent(properties_str, 1)); + file->add_template_var("property_descriptions", property_descriptions); + (*file->stream()) << class_template; + + DirectoryWriter::get() + .file(std::filesystem::path("custom_types") / std::string("__init__.py")) + ->add_import("." + name + "_", name, /*export=*/true); + + gloo::DirectoryWriter::get() + .file("custom_types/stringify.py") + ->add_import("." 
+ name + "_", "Stringify" + name, /*export=*/true); +} + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/generate/python/types/node_enum.cc b/cli/cpp_src/variant/generate/python/types/node_enum.cc new file mode 100644 index 000000000..50fd68735 --- /dev/null +++ b/cli/cpp_src/variant/generate/python/types/node_enum.cc @@ -0,0 +1,63 @@ +#include "variant/ast/types/node_enum.h" + +#include "variant/generate/dir_writer.h" +#include "variant/generate/python/utils.h" + +namespace gloo::AST { + +const std::string enum_template = R"( +import typing +from enum import Enum +from gloo_py.stringify import ( + StringifyBase, + StringifyError, + StringifyNone, + StringifyBool, + StringifyInt, + StringifyChar, + StringifyString, + StringifyFloat, + StringifyEnum, + StringifyUnion, + StringifyOptional, + StringifyList, + StringifyClass, + FieldDescription, + EnumFieldDescription, + StringifyRemappedField, + StringifyCtx +) + +class {@@name}(str, Enum): + {@@values} + +class Stringify{@@name}(StringifyEnum[{@@name}]): + def __init__(self, **update_kwargs: StringifyRemappedField) -> None: + values = { + v: EnumFieldDescription(name=v.value, description=None, skip=False) + for v in {@@name} + } + super().__init__(values=values, updates=update_kwargs) +)"; + +IMPL_PYTHONIC(EnumNode) { + (void)deps; // UNUSED + auto file = DirectoryWriter::get().file("custom_types/" + name + "_.py"); + file->add_template_var("name", name); + + std::string values_str = ""; + for (const auto &value : values) { + values_str += value + " = \"" + value + "\"\n"; + } + file->add_template_var("values", Python::indent(values_str, 1)); + (*file->stream()) << enum_template; + + DirectoryWriter::get() + .file(std::filesystem::path("custom_types") / std::string("__init__.py")) + ->add_import("." + name + "_", name, /*export=*/true); + gloo::DirectoryWriter::get() + .file("custom_types/stringify.py") + ->add_import("." 
+ name + "_", "Stringify" + name, /*export=*/true); +} + +} // namespace gloo::AST diff --git a/cli/cpp_src/variant/generate/python/types/node_llm_client.cc b/cli/cpp_src/variant/generate/python/types/node_llm_client.cc new file mode 100644 index 000000000..e047c0c5a --- /dev/null +++ b/cli/cpp_src/variant/generate/python/types/node_llm_client.cc @@ -0,0 +1,53 @@ +#include "variant/ast/types/node_llm_client.h" + +#include "variant/generate/dir_writer.h" +#include "variant/generate/python/utils.h" + +namespace gloo::AST { + +const std::string llm_client_template = R"( +from gloo_py import llm_client_factory, ENV + +{@@name} = llm_client_factory(provider='{@@provider}', {@@params}) +)"; + +IMPL_PYTHONIC(LLMClientNode) { + (void)deps; // UNUSED, allow for -Wunused-value + auto file = DirectoryWriter::get().file("clients/llm_" + name + ".py"); + + file->add_template_var("name", name); + file->add_template_var("provider", provider); + + std::string params = ""; + for (const auto& [key, v] : args) { + params += key + "=" + Python::AsValue(v) + ", "; + } + if (num_retries > 0) { + params += "__retries__=" + std::to_string(num_retries) + ", "; + } + if (default_fallback_client.has_value()) { + file->add_import(".llm_" + default_fallback_client.value(), + default_fallback_client.value()); + params += "__default_fallback__=" + default_fallback_client.value() + ", "; + } + if (fallback_clients.size() > 0) { + params += "__fallback__={"; + for (const auto& [code, client] : fallback_clients) { + file->add_import(".llm_" + client, client); + params += std::to_string(code) + ": " + client + ", "; + } + params = params.substr(0, params.size() - 2) + "}, "; + } + + params = params.substr(0, params.size() - 2); + + file->add_template_var("params", params); + + (*file->stream()) << llm_client_template; + + DirectoryWriter::get() + .file(std::filesystem::path("clients") / std::string("__init__.py")) + ->add_import(".llm_" + name, name, /*export=*/true); +} + +} // namespace gloo::AST 
diff --git a/cli/cpp_src/variant/generate/python/utils.h b/cli/cpp_src/variant/generate/python/utils.h new file mode 100644 index 000000000..ce6f42a66 --- /dev/null +++ b/cli/cpp_src/variant/generate/python/utils.h @@ -0,0 +1,165 @@ +#pragma once + +#include +#include + +#include "variant/ast/shared/node_type.h" + +namespace gloo::Python { + +const char pairs[][2] = { + {'(', ')'}, {'[', ']'}, {'{', '}'}, {'"', '"'}, {'\'', '\''}}; + +inline bool isConstructor(const std::string& value) { + // Check if the string has matching pairs of (), [], {} + std::vector stack; + char in_string = '\0'; + for (const auto& c : value) { + if (in_string != '\0') { + if (c == in_string) { + in_string = '\0'; + stack.pop_back(); + } + continue; + } + for (const auto& [open, close] : pairs) { + if (c == '"' || c == '\'') { + in_string = c; + stack.push_back(c); + break; + } + if (c == open) { + stack.push_back(open); + } else if (c == close) { + if (stack.empty() || stack.back() != open) { + return false; + } + stack.pop_back(); + } + } + } + + bool is_constructor = stack.empty(); + + // Check if the string is of the form CLASS_NAME(...) 
+ if (is_constructor) { + std::regex re("^[a-zA-Z_]\\w*\\([\\s\\S]*\\)\\s*$"); + is_constructor = std::regex_match(value, re); + } + + return is_constructor; +} + +inline std::string AsValue(const std::string& value) { + if (value.find("Conversation(thread=") != std::string::npos) { + } + // if the string is a number, return it as is + if (std::regex_match(value, std::regex("[-+]?[0-9]*\\.?[0-9]+"))) { + return value; + } + if (value == "true" || value == "True") { + return "True"; + } + if (value == "false" || value == "False") { + return "False"; + } + + for (const auto& [open, close] : pairs) { + if (value[0] == open && value[value.size() - 1] == close) { + return value; + } + } + // if string is an f-string, return it as is + if (value[0] == 'f' && value[1] == value[value.size() - 1] && + (value[1] == '"' || value[1] == '\'')) { + return value; + } + // Special value of the form @ENV.VAR_NAME should be returned + // without the @ + if (std::regex_match(value, std::regex("@ENV\\.[a-zA-Z_]\\w*"))) { + return value.substr(1); + } + // Special case for None + if (value == "None") { + return value; + } + // Special case for empty string + if (value == "") { + return "''"; + } + // Special case for Python constructors + if (isConstructor(value)) { + return value; + } + + return "'''" + value + "'''"; +} + +inline std::string AsValue(const std::shared_ptr& node, + const std::string& value) { + const auto type = node->toString(); + if (type == "bool") { + if (value == "true" || value == "True") { + return "True"; + } + if (value == "false" || value == "False") { + return "False"; + } + throw SyntaxError(node->token, "Invalid boolean value: " + value); + } + + if (type == "int" || type == "float") { + // if the string is a number, return it as is + if (std::regex_match(value, std::regex("[-+]?[0-9]*\\.?[0-9]+"))) { + return value; + } + throw SyntaxError(node->token, "Invalid number value: " + value); + } + + if (!(node->toString() == "string" || node->toString() == 
"char")) { + return value; + } + + if (value[0] == '"' && value[value.size() - 1] == '"') { + return value; + } + + if (value[0] == '\'' && value[value.size() - 1] == '\'') { + return value; + } + + // if string is an f-string, return it as is + if (value[0] == 'f' && value[1] == value[value.size() - 1] && + (value[1] == '"' || value[1] == '\'')) { + return value; + } + // Special value of the form @ENV.VAR_NAME should be returned + // without the @ + if (std::regex_match(value, std::regex("@ENV\\.[a-zA-Z_]\\w*"))) { + return value.substr(1); + } + // Special case for empty string + if (value == "") { + return "''"; + } + + return "'''" + value + "'''"; +} + +inline std::string indent(const std::string& value, int level) { + std::string output = ""; + for (int i = 0; i < level; i++) { + output += " "; + } + // deal with the case where the value is a multiline string by replacing + // newlines with newlines + indent + std::string indent_str = "\n" + output; + std::string indent_value = + std::regex_replace(value, std::regex("\n"), indent_str); + // Remove leading and trailing newlines + indent_value = std::regex_replace(indent_value, std::regex("^\\n+"), ""); + indent_value = std::regex_replace(indent_value, std::regex("\\n+$"), ""); + return indent_value; +} + +} // namespace gloo::Python \ No newline at end of file diff --git a/cli/cpp_src/variant/post_process/dependency_graph.cc b/cli/cpp_src/variant/post_process/dependency_graph.cc new file mode 100644 index 000000000..2e5cd62ac --- /dev/null +++ b/cli/cpp_src/variant/post_process/dependency_graph.cc @@ -0,0 +1,133 @@ +#include "variant/post_process/dependency_graph.h" + +#include +#include + +#include "variant/common.h" +#include "variant/error.h" + +namespace gloo::PostProcess { + +std::pair>, + std::unordered_map>> +BuildDependencyGraph(const AST::Nodes &nodes) { + std::unordered_map, + std::vector>> + deps; + + // Now validate all the enums, classes, tasks, runners, and pipelines + for (const auto &node : 
 nodes.enums) {
    // Enums have no outgoing dependencies.
    deps[node->uniqueName()] = {node, {}};
  }

  for (const auto &node : nodes.clients) {
    deps[node->uniqueName()] = {node, node->dependencies()};
  }

  for (const auto &node : nodes.classes) {
    deps[node->uniqueName()] = {node, node->dependencies()};
  }

  for (const auto &node : nodes.functions) {
    auto func_deps = node->dependencies();

    deps[node->uniqueName()] = {node, func_deps};
  }

  for (const auto &[func, variants] : nodes.function_variants) {
    for (const auto &variant : variants) {
      auto func_deps = variant->dependencies();

      deps[variant->uniqueName()] = {variant, func_deps};
    }
  }

  // Test groups depend only on the function they exercise.
  for (const auto &[func, test_groups] : nodes.function_test_groups) {
    for (const auto &group : test_groups) {
      deps[group->uniqueName()] = {group, {func}};
    }
  }

  // Snapshot of each node's direct dependencies, keyed by uniqueName();
  // `deps` itself is consumed destructively by the sort below.
  // NOTE(review): container template arguments throughout this function were
  // stripped by extraction ("std::unordered_map>" etc.) — left as found.
  std::unordered_map> name_to_dep;
  for (const auto &it : deps) {
    name_to_dep[it.first] = it.second.second;
  }

  // Generate the order in which to process the nodes using a topological sort
  std::vector> order;
  std::unordered_map order_map;

  // Kahn's algorithm: seed with nodes that have no unresolved dependencies.
  std::vector next_in_line;
  for (const auto &dep : deps) {
    if (dep.second.second.size() == 0) {
      next_in_line.push_back(dep.first);
    }
  }

  // max_loops doubles as the wave counter (stored into order_map) and as a
  // hard cap of 1000 waves against pathological inputs.
  int max_loops = 0;
  while (next_in_line.size() > 0 && max_loops++ < 1000) {
    // Remove the nodes that are next in line
    for (const auto &name : next_in_line) {
      order.push_back(deps[name].first);
      order_map[name] = max_loops;
      deps.erase(name);
    }

    // Remove them from the dependencies of other nodes
    for (const auto &name : next_in_line) {
      for (auto &dep : deps) {
        auto &dependencies = dep.second.second;
        dependencies.erase(
            std::remove(dependencies.begin(), dependencies.end(), name),
            dependencies.end());
      }
    }

    next_in_line.clear();
    for (const auto &dep : deps) {
      if (dep.second.second.size() == 0) {
        next_in_line.push_back(dep.first);
      }
    }
  }

  // Anything still in `deps` was never freed of dependencies: a cycle.
  if (deps.size() > 0) {
    std::string error = "";
    for (const auto &dep : deps) {
      error += dep.first + " ";
    }
    throw CircularDependencyError(deps.begin()->second.first->token, error);
  }

  // Stable presentation order: wave index, then node->order(), then source
  // line.
  // NOTE(review): order_map was populated with uniqueName() keys but is read
  // here with a->name — TODO confirm name == uniqueName() for every ordered
  // node, otherwise operator[] silently inserts 0 and the sort is wrong.
  std::sort(order.begin(), order.end(),
            [&](const std::shared_ptr &a,
                const std::shared_ptr &b) {
              if (order_map[a->name] == order_map[b->name]) {
                if (a->order() == b->order()) {
                  return a->token.line < b->token.line;
                }
                return a->order() < b->order();
              }
              return order_map[a->name] < order_map[b->name];
            });

  // Now that we have the order, for each dependency, recurively add the
  // dependencies of the dependency. This will ensure that the dependencies are
  // added in the correct order.

  // One level of transitive closure per node, walked in topological order so
  // earlier entries are already expanded when later ones read them.
  for (const auto &it : order) {
    std::vector name_deps = name_to_dep[it->uniqueName()];
    // update the dependencies to be the dependencies of the dependencies
    std::unordered_set new_deps(name_deps.begin(),
                                name_deps.end());
    for (const auto &dep : name_deps) {
      auto dep_deps = name_to_dep[dep];
      new_deps.insert(dep_deps.begin(), dep_deps.end());
    }
    name_to_dep[it->uniqueName()] =
        std::vector(new_deps.begin(), new_deps.end());
  }

  return std::make_pair(order, name_to_dep);
}
};  // namespace gloo::PostProcess
diff --git a/cli/cpp_src/variant/post_process/dependency_graph.h b/cli/cpp_src/variant/post_process/dependency_graph.h
new file mode 100644
index 000000000..4898983e9
--- /dev/null
+++ b/cli/cpp_src/variant/post_process/dependency_graph.h
#pragma once
// NOTE(review): include targets and template arguments below were stripped in
// extraction — left as found.
#include
#include
#include

#include "variant/ast/ast.h"

namespace gloo::PostProcess {
// Returns (topologically sorted nodes, transitive dependency map by name).
std::pair>,
          std::unordered_map>>
BuildDependencyGraph(const AST::Nodes &nodes);
};
diff --git a/cli/cpp_src/variant/post_process/validate.cc b/cli/cpp_src/variant/post_process/validate.cc
new file mode 100644
index 000000000..be5e75e80
--- /dev/null
+++ b/cli/cpp_src/variant/post_process/validate.cc
#include "variant/post_process/validate.h"

#include
#include

#include "variant/common.h"
#include "variant/error.h"

namespace gloo::PostProcess {
void
 Validate(const AST::Nodes &nodes) {
  // Validates the whole parsed program: rejects duplicate/invalid names,
  // attaches variants and test groups to their functions, runs each node's
  // own validation, then links class/enum references.
  // NOTE(review): container template arguments were stripped in extraction
  // ("std::unordered_set" with no payload) — left as found.
  // First pass get all names of enums, classes, tasks, runners, and pipelines
  std::unordered_set all_names;
  std::unordered_set enum_names;
  std::unordered_set class_names;
  std::unordered_set function_names;
  std::unordered_set client_names;

  // Registers `name` into `target`, enforcing a single global namespace
  // across all node kinds (all_names).
  auto checkDupes = [&](std::unordered_set &target,
                        const std::shared_ptr &node,
                        const std::string &name) {
    validateIdentifier(node->token, name);

    if (all_names.find(name) != all_names.end()) {
      throw DuplicateError({node->token}, name + " in " + node->toString());
    }
    all_names.insert(name);
    target.insert(name);
  };

  for (const auto &node : nodes.enums) {
    checkDupes(enum_names, node, node->name);
  }
  for (const auto &node : nodes.classes) {
    checkDupes(class_names, node, node->name);
  }
  for (const auto &node : nodes.functions) {
    checkDupes(function_names, node, node->name);
  }
  for (const auto &node : nodes.clients) {
    checkDupes(client_names, node, node->name);
  }
  // Attach each variant to its function; variant names only need to be
  // unique within their function, not globally.
  for (const auto &function_variant : nodes.function_variants) {
    std::string func = function_variant.first;
    auto variants = function_variant.second;
    // Find the function node in nodes.functions by func
    auto it = std::find_if(nodes.functions.begin(), nodes.functions.end(),
                           [&](const std::shared_ptr &node) {
                             return node->name == func;
                           });

    std::unordered_set names;
    for (const auto &variant : variants) {
      if (it == nodes.functions.end()) {
        throw SyntaxError(variant->token, "Function not found: " + func);
      }
      validateIdentifier(variant->token, variant->name);
      if (names.find(variant->name) != names.end()) {
        throw DuplicateError({variant->token},
                             variant->name + "\n" + variant->toString());
      }
      names.insert(variant->name);
      (*it)->addVariant(variant);
    }
  }

  // Same attachment pass for test groups.
  for (const auto &function_test_group : nodes.function_test_groups) {
    std::string func = function_test_group.first;
    // Find the function node in nodes.functions by func
    auto it = std::find_if(nodes.functions.begin(), nodes.functions.end(),
                           [&](const std::shared_ptr &node) {
                             return node->name == func;
                           });

    std::unordered_set names;
    auto test_groups = function_test_group.second;
    for (const auto &test_group : test_groups) {
      if (it == nodes.functions.end()) {
        throw SyntaxError(test_group->token, "Function not found: " + func);
      }
      validateIdentifier(test_group->token, test_group->name);
      if (names.find(test_group->name) != names.end()) {
        throw DuplicateError({test_group->token},
                             test_group->name + "\n" + test_group->toString());
      }
      names.insert(test_group->name);
      test_group->validate(function_names);
      (*it)->addTestGroup(test_group);
    }
  }

  // Now validate all the enums, classes, tasks, runners, and pipelines
  for (const auto &node : nodes.clients) {
    node->validate(client_names);
  }

  for (const auto &node : nodes.enums) {
    node->validate();
  }

  for (const auto &node : nodes.classes) {
    node->validate(class_names, enum_names);
  }

  for (const auto &node : nodes.functions) {
    node->validate(class_names, enum_names);
  }

  for (const auto &[func, variants] : nodes.function_variants) {
    for (const auto &variant : variants) {
      variant->validate(class_names, enum_names, function_names, client_names);
    }
  }

  // Link all types.
  for (const auto &node : nodes.classes) {
    node->link(nodes.classes, nodes.enums);
  }
  for (const auto &node : nodes.functions) {
    node->link(nodes.classes, nodes.enums);
  }
}
};  // namespace gloo::PostProcess
diff --git a/cli/cpp_src/variant/post_process/validate.h b/cli/cpp_src/variant/post_process/validate.h
new file mode 100644
index 000000000..b45bf569a
--- /dev/null
+++ b/cli/cpp_src/variant/post_process/validate.h
#pragma once

#include "variant/ast/ast.h"

namespace gloo::PostProcess {
// Validates and cross-links the parsed AST; throws on any semantic error.
void Validate(const AST::Nodes &nodes);
};
diff --git a/cli/cpp_src/variant/tokenizer/tokenizer.cc b/cli/cpp_src/variant/tokenizer/tokenizer.cc
new file mode 100644
index 000000000..81f4562dc
--- /dev/null
+++ b/cli/cpp_src/variant/tokenizer/tokenizer.cc
#include "variant/tokenizer/tokenizer.h"

// NOTE(review): include targets stripped in extraction — left as found.
#include
#include
#include

#include "variant/error.h"

namespace gloo::Tokenizer {
// Maps a word that immediately followed an '@' to its keyword TokenKind;
// falls through to Identifier for anything unrecognized.
TokenKind GetIdentifier(const std::string &str) {
  if (str == "enum") {
    return TokenKind::EnumKeyword;
  }
  if (str == "class") {
    return TokenKind::ClassKeyword;
  }
  if (str == "function") {
    return TokenKind::FunctionKeyword;
  }
  if (str == "method") {
    return TokenKind::MethodKeyword;
  }
  if (str == "prompt") {
    return TokenKind::PromptKeyword;
  }
  if (str == "input") {
    return TokenKind::InputKeyword;
  }
  if (str == "output") {
    return TokenKind::OutputKeyword;
  }
  if (str == "depends_on") {
    return TokenKind::DependsOnKeyword;
  }
  // NOTE(review): duplicate of the "method" check above — dead branch.
  if (str == "method") {
    return TokenKind::MethodKeyword;
  }
  if (str.starts_with("lang[") && str.ends_with("]")) {
    return TokenKind::Lang;
  }
  if (str.starts_with("variant[") && str.ends_with("]")) {
    return TokenKind::VariantKeyword;
  }
  if (str == "test_group") {
    return TokenKind::TestGroupKeyword;
  }
  if (str == "case") {
    return TokenKind::TestCaseKeyword;
  }
  if (str.starts_with("client[") && str.ends_with("]")) {
    return TokenKind::ClientKeyword;
  }
  if (str ==
 "provider") {
    return TokenKind::ProviderKeyword;
  }
  if (str == "rename") {
    return TokenKind::AliasKeyword;
  }
  if (str == "describe") {
    return TokenKind::DescriptionKeyword;
  }
  if (str == "skip") {
    return TokenKind::SkipKeyword;
  }
  if (str == "stringify") {
    return TokenKind::StringifyKeyword;
  }
  if (str == "retry") {
    // || (str.starts_with("retry[") && str.ends_with("]"))) {
    return TokenKind::RetryKeyword;
  }
  if (str == "fallback" ||
      (str.starts_with("fallback[") && str.ends_with("]"))) {
    return TokenKind::FallbackKeyword;
  }
  return TokenKind::Identifier;
}

// Splits `str` into tokens line by line. Punctuation (, : { } @) is emitted
// immediately; any other run of non-whitespace accumulates into token_str and
// is flushed by maybeAddIdentifier. A word is only looked up as a keyword
// when it directly follows an '@' (atSymbolCol >= 0); otherwise it is a plain
// Identifier.
// NOTE(review): container template arguments were stripped in extraction —
// left as found.
std::vector Tokenize(const std::string &file, const std::string &str) {
  std::vector tokens;
  int line = 1;
  // Read the string one line at a time.
  std::istringstream iss(str);
  std::string line_str;
  while (std::getline(iss, line_str)) {
    int column = 1;
    // Read the line one character at a time until whitespace which would be a
    // token.
    std::string token_str;
    int atSymbolCol = -1;
    auto maybeAddIdentifier = [&]() {
      if (token_str.length() > 0) {
        TokenKind kind =
            atSymbolCol >= 0 ? GetIdentifier(token_str) : TokenKind::Identifier;
        // `column` still points at the token's first character here; it is
        // only advanced past the token after the push.
        tokens.push_back({file, line, column, kind, token_str});
        column += static_cast(token_str.length());
        token_str.clear();
      }
      atSymbolCol = -1;
    };

    for (char c : line_str) {
      switch (c) {
        case ',':
          maybeAddIdentifier();
          tokens.push_back({file, line, column, TokenKind::Comma, ","});
          column++;
          break;
        case ':':
          maybeAddIdentifier();
          tokens.push_back({file, line, column, TokenKind::Colon, ":"});
          column++;
          break;
        case '{':
          maybeAddIdentifier();
          tokens.push_back(
              {file, line, column, TokenKind::LeftCurlyBracket, "{"});
          column++;
          break;
        case '}':
          maybeAddIdentifier();
          tokens.push_back(
              {file, line, column, TokenKind::RightCurlyBracket, "}"});
          column++;
          break;
        case '@':
          // Arms keyword recognition for the word that follows.
          maybeAddIdentifier();
          tokens.push_back({file, line, column, TokenKind::AtSymbol, "@"});
          atSymbolCol = column;
          column++;
          break;
        case ' ':  // Whitespace
        case '\t':
        case '\n':
        case '\r':
          maybeAddIdentifier();
          column++;
          break;
        default:
          token_str += c;
          break;
      }
    }
    maybeAddIdentifier();
    line++;
  }
  tokens.push_back({file, line, 1, TokenKind::Eof, "[EOF]"});
  return tokens;
}

// Human-readable token-kind names used in error messages.
std::string TokenKindToString(TokenKind kind) {
  switch (kind) {
    case TokenKind::Comma:
      return "comma (,)";
    case TokenKind::Colon:
      return "colon (:)";
    case TokenKind::RightCurlyBracket:
      return "right curly bracket (})";
    case TokenKind::LeftCurlyBracket:
      return "left curly bracket ({)";
    case TokenKind::AtSymbol:
      return "at symbol (@)";
    case TokenKind::EnumKeyword:
      return "@enum";
    case TokenKind::ClassKeyword:
      return "@class";
    case TokenKind::FunctionKeyword:
      return "@function";
    case TokenKind::MethodKeyword:
      return "@method";
    case TokenKind::PromptKeyword:
      return "@prompt";
    case TokenKind::InputKeyword:
      return "@input";
    case TokenKind::OutputKeyword:
      return "@output";
    case TokenKind::DependsOnKeyword:
      return "@depends_on";
    case TokenKind::TestGroupKeyword:
      return "@test_group";
    case TokenKind::TestCaseKeyword:
      return "@case";
    case TokenKind::VariantKeyword:
      return "@variant[*]";
    case TokenKind::Lang:
      return "@lang[*]";
    case TokenKind::ClientKeyword:
      return "@client[*]";
    case TokenKind::ProviderKeyword:
      return "@provider";
    case TokenKind::AliasKeyword:
      return "@rename";
    case TokenKind::DescriptionKeyword:
      return "@describe";
    case TokenKind::SkipKeyword:
      return "@skip";
    case TokenKind::StringifyKeyword:
      return "@stringify";
    case TokenKind::RetryKeyword:
      return "@retry";
    case TokenKind::FallbackKeyword:
      return "@fallback[code]";
    case TokenKind::Identifier:
      return "[identifier]";
    // Add cases for any other TokenKinds you might have.
    default:
      return "unknown token";
  }
}

}  // namespace gloo::Tokenizer
diff --git a/cli/cpp_src/variant/tokenizer/tokenizer.h b/cli/cpp_src/variant/tokenizer/tokenizer.h
new file mode 100644
index 000000000..a2c28a298
--- /dev/null
+++ b/cli/cpp_src/variant/tokenizer/tokenizer.h
#pragma once
// NOTE(review): include targets stripped in extraction — left as found.
#include
#include

namespace gloo::Tokenizer {
enum TokenKind {
  RightCurlyBracket,
  LeftCurlyBracket,
  Colon,
  Comma,
  AtSymbol,

  EnumKeyword,

  MethodKeyword,

  ClassKeyword,

  FunctionKeyword,
  InputKeyword,
  OutputKeyword,

  ClientKeyword,
  ProviderKeyword,
  RetryKeyword,
  FallbackKeyword,

  VariantKeyword,
  PromptKeyword,
  StringifyKeyword,
  AliasKeyword,
  DescriptionKeyword,
  SkipKeyword,

  DependsOnKeyword,

  TestGroupKeyword,
  TestCaseKeyword,
  // Language specific
  Lang,
  // Catch all
  Identifier,
  // End of file
  Eof,
};

// A single lexed token with its source position (1-indexed line/column).
struct Token {
  std::string file;
  int line;
  int column;
  TokenKind kind;
  std::string value;
};

std::vector Tokenize(const std::string& file, const std::string& str);

std::string TokenKindToString(TokenKind kind);
}  // namespace gloo::Tokenizer
diff --git a/cli/data/main.gloo.template
b/cli/data/main.gloo.template new file mode 100644 index 000000000..efb1040dd --- /dev/null +++ b/cli/data/main.gloo.template @@ -0,0 +1,48 @@ +@client[llm] GPT35Client { + @provider openai + model gpt-3.5-turbo + temperature 0 + api_key @ENV.OPENAI_API_KEY +} + +@client[llm] GPT4Client { + @provider openai + model gpt-4 + temperature 0 + api_key @ENV.OPENAI_API_KEY +} + + +@function ClassifySentiment { + @input string + @output bool +} + +@test_group MyTests for ClassifySentiment { + @case MyCase1 { + @input I am very very happy today. + } + + @case MyCase2 { + @input I am ecstatic! + } + + @case MyCase3 { + @input { + someone stole my bag + and it sucked. + } + } +} + +@variant[llm] v1 for ClassifySentiment { + @client[llm] GPT35Client + @prompt { + Tell me if the following input is happy or not. + + INPUT: + {@input} + + True / False: + } +} diff --git a/cli/src/main.rs b/cli/src/main.rs new file mode 100644 index 000000000..f9e5888ab --- /dev/null +++ b/cli/src/main.rs @@ -0,0 +1,437 @@ +#[macro_use] +extern crate log; +use colored::*; +use std::fs; +use std::fs::File; +use std::io::{self, Read, Write}; +use std::path::{Path, PathBuf}; +use yaml_rust::{Yaml, YamlEmitter, YamlLoader}; + +use clap::App; + +extern crate libc; +mod utils; +extern "C" { + fn receive_data( + output_dir: *const libc::c_char, + filenames: *const *const libc::c_char, + contents: *const *const libc::c_char, + len: libc::c_int, + error_msg: *mut libc::c_char, + ) -> libc::c_int; +} + +fn is_poetry_enabled() -> bool { + use std::process::Command; + + let output = Command::new("poetry").arg("--version").output(); + + match output { + Ok(output) => output.status.success(), + Err(_) => false, + } +} + +fn add_gloo_lib() -> Result<(), &'static str> { + use std::process::Command; + if is_poetry_enabled() { + println!("{}", "Adding gloo dependencies...".dimmed()); + let output = Command::new("poetry") + .arg("add") + .arg("gloo-lib@latest") + .arg("--no-cache") + .output() + .expect("Failed to 
execute command"); + + if output.status.success() { + println!("{}", "Successfully added gloo-py to the project.".green()); + Ok(()) + } else { + Err("Failed to add gloo-py.") + } + } else { + println!("{}", "Adding gloo dependencies...".dimmed()); + let output = Command::new("pip") + .arg("install") + .arg("gloo-lib") + .arg("--upgrade") + .output() + .expect("Failed to execute command"); + + if output.status.success() { + println!("{}", "Successfully added gloo-py to the project.".green()); + Ok(()) + } else { + Err("Failed to add gloo-py.") + } + } +} + +fn init_command(_init_matches: &clap::ArgMatches) { + // Check if gloo.yaml already exists + if Path::new("gloo.yaml").exists() { + // At this point gloo_lib should already be in the package deps + println!("{}", "Looks like gloo init has already been run. Delete gloo.yaml to override your existing configuration.".blue()); + return; + } + + // Default values + let default_output_dir = "./generated"; + let default_gloo_dir = "./gloo"; + + // Ask the user for the output directory + print!( + "{}", + format!( + "Enter the output directory for generated code (default: {}): ", + default_output_dir + ) + .green() + ); + io::stdout().flush().unwrap(); + let mut output_dir = String::new(); + io::stdin().read_line(&mut output_dir).unwrap(); + if output_dir.trim().is_empty() { + output_dir = default_output_dir.to_string(); + } + + // Ask the user for the .gloo files directory + print!( + "{}", + format!( + "Enter the directory to store .gloo files (default: {}): ", + default_gloo_dir + ) + .green() + ); + io::stdout().flush().unwrap(); + let mut gloo_dir = String::new(); + io::stdin().read_line(&mut gloo_dir).unwrap(); + if gloo_dir.trim().is_empty() { + gloo_dir = default_gloo_dir.to_string(); + } + + // Create a YAML document with the user's input + let doc = Yaml::Hash( + vec![ + ( + Yaml::String("output_dir".to_string()), + Yaml::String(output_dir.trim().to_string()), + ), + ( + Yaml::String("gloo_dir".to_string()), + 
Yaml::String(gloo_dir.trim().to_string()), + ), + ] + .into_iter() + .collect(), + ); + let mut out_str = String::new(); + { + let mut emitter = YamlEmitter::new(&mut out_str); + emitter.dump(&doc).unwrap(); // dump the YAML object to a String + } + + // Write the YAML document to gloo.yaml in the current directory + let mut file = File::create(Path::new("gloo.yaml")).unwrap(); + file.write_all(out_str.as_bytes()).unwrap(); + + // Create the gloo dir + let gloo_path = Path::new(&gloo_dir); + if !gloo_path.exists() { + std::fs::create_dir_all(gloo_path).unwrap(); + } + + // create a main.gloo file in the gloo dir as a text file + let template = include_str!("../data/main.gloo.template"); + let mut file = File::create(gloo_path.join("main.gloo")).unwrap(); + + // copy the contents from ./main.gloo.template into the main.gloo + file.write_all(template.as_bytes()).unwrap(); + + match add_gloo_lib() { + Ok(_) => (), + Err(e) => { + println!("{}", e.red()); + return; + } + } + + // emit a message to the user that they can create their pipeline in the main.gloo file in a purple color + println!( + "{}", + format!( + "You can now create your LLM functions in {}/main.gloo !", + gloo_dir + ) + .purple() + ); +} + +fn load_and_parse_yaml() -> (Yaml, PathBuf, PathBuf, PathBuf) { + // Check if gloo.yaml exists in the current directory or any parent directory + let mut current_dir = std::env::current_dir().unwrap(); + loop { + let gloo_path = current_dir.join("gloo.yaml"); + if gloo_path.exists() { + break; + } + if !current_dir.pop() { + error!("gloo.yaml not found in the current directory or any parent directory. 
Have you run gloo init?"); + std::process::exit(1); + } + } + + // Load the YAML file using yaml_rust + let mut yaml_file = File::open(current_dir.join("gloo.yaml")).unwrap(); + let mut yaml_string = String::new(); + yaml_file.read_to_string(&mut yaml_string).unwrap(); + let yaml_docs = YamlLoader::load_from_str(&yaml_string).unwrap(); + let yaml = &yaml_docs[0]; // get the first document + + let version = yaml["version"].as_str().unwrap_or(""); + if version == "" { + } else if semver::Version::parse(version).unwrap() + > semver::Version::parse(env!("CARGO_PKG_VERSION")).unwrap() + { + // Recommend the user to downgrade to the current version. + error!( + "{}", + format!( + "Your gloo version is too old. Run 'gloo update' to get version {}.", + version + ) + .red() + ); + std::process::exit(1); + } + let output_dir = yaml["output_dir"] + .as_str() + .expect("gloo.yaml seems to be misconfigured. Failed to find the output_dir field."); + let gloo_dir = yaml["gloo_dir"] + .as_str() + .expect("gloo.yaml seems to be misconfigured. Failed to find the gloo_dir field."); + + ( + yaml.clone(), + fs::canonicalize(current_dir.join("gloo.yaml")).unwrap(), + fs::canonicalize(current_dir.join(output_dir)).unwrap(), + fs::canonicalize(current_dir.join(gloo_dir)).unwrap(), + ) +} + +fn build_command(_build_matches: &clap::ArgMatches) { + let (yaml, yaml_path, output_path, gloo_path) = load_and_parse_yaml(); + + let output_dir = output_path.to_str().unwrap().to_string(); + let gloo_dir = gloo_path.to_str().unwrap().to_string(); + + if !gloo_path.exists() { + error!( + "gloo directory not found at path: {}. 
Have you run gloo init?", + gloo_path.display() + ); + return; + } + + fs::create_dir_all(&output_path).unwrap_or_else(|_| { + error!( + "Failed to create directory at path: {}", + output_path.display() + ); + std::process::exit(1); + }); + + // Read the files from the gloo directory + let data = match utils::read_directory(&gloo_dir) { + Ok(val) => val, + Err(e) => { + error!("{}", e); + return; + } + }; + + // Convert the filenames and contents into C-compatible strings + let filenames_cstr: Vec = data + .iter() + .map(|(name, _)| std::ffi::CString::new(name.as_str()).unwrap()) + .collect(); + + let contents_cstr: Vec = data + .iter() + .map(|(_, content)| std::ffi::CString::new(content.as_str()).unwrap()) + .collect(); + + // Convert the CStrings into raw pointers + let filenames_ptrs: Vec<*const libc::c_char> = + filenames_cstr.iter().map(|cstr| cstr.as_ptr()).collect(); + + let contents_ptrs: Vec<*const libc::c_char> = + contents_cstr.iter().map(|cstr| cstr.as_ptr()).collect(); + + let output_dir_cstr = std::ffi::CString::new(output_dir.clone()).unwrap(); + let output_dir_ptr = output_dir_cstr.as_ptr() as *const i8; + let mut error_msg = [0u8; 256]; + let result = unsafe { + receive_data( + output_dir_ptr, + filenames_ptrs.as_ptr(), + contents_ptrs.as_ptr(), + data.len() as libc::c_int, + error_msg.as_mut_ptr() as *mut libc::c_char, + ) + }; + + // If result is 0, then the build was successful + // update the gloo.yaml file with the current version + if result == 0 { + let mut yaml_hash = yaml.as_hash().unwrap().clone(); + yaml_hash.insert( + Yaml::String("version".to_string()), + Yaml::String(env!("CARGO_PKG_VERSION").to_string()), + ); + let updated_yaml = Yaml::Hash(yaml_hash); + // Write the YAML document to gloo.yaml in the current directory + let mut file = File::create(yaml_path).unwrap(); + let mut out_str = String::new(); + { + let mut emitter = YamlEmitter::new(&mut out_str); + emitter.dump(&updated_yaml).unwrap(); // dump the YAML object to a String 
+ } + file.write_all(out_str.as_bytes()).unwrap(); + } + + match result { + 0 => { + // Print in green + println!("Build complete. See: {}", output_dir.green()); + } + _ => { + let msg = unsafe { + std::ffi::CStr::from_ptr(error_msg.as_ptr() as *const i8) + .to_string_lossy() + .into_owned() + }; + error!("{}", msg); + } + } +} + +fn update_command() -> Result<(), &'static str> { + if cfg!(debug_assertions) { + return Err("This command is disabled for non-release builds."); + } + use std::process::Command; + + println!("{}", "Updating gloo dependencies...".dimmed()); + + if cfg!(target_os = "macos") { + let output = Command::new("brew") + .arg("tap") + .arg("gloohq/gloo") + .output() + .expect("Failed to tap gloo in brew."); + + if !output.status.success() { + return Err("Failed to tap gloo in brew."); + } + + let output = Command::new("brew") + .arg("update") + .output() + .expect("Failed to update brew"); + + if !output.status.success() { + return Err("Failed to update brew."); + } + + let output = Command::new("brew") + .arg("upgrade") + .arg("gloo") + .output() + .expect("Failed to upgrade gloo"); + + if !output.status.success() { + return Err("Failed to upgrade gloo."); + } + } else if cfg!(target_os = "windows") { + let output = Command::new("scoop") + .arg("update") + .output() + .expect("Failed to update scoop"); + + if !output.status.success() { + return Err("Failed to install gloo with scoop."); + } + + let output = Command::new("scoop") + .arg("update") + .arg("gloo") + .output() + .expect("Failed to upgrade gloo"); + if !output.status.success() { + return Err("Failed to upgrade gloo."); + } + } else if cfg!(target_os = "linux") { + let output = Command::new("sh") + .arg("-c") + .arg("curl -fsSL https://raw.githubusercontent.com/GlooHQ/homebrew-gloo/main/install-gloo.sh | bash") + .output() + .expect("Failed to execute command"); + + if !output.status.success() { + return Err("Failed to install gloo with curl."); + } + } else { + return Err("Unsupported 
operating system for update command."); + } + + // TODO: print out new version. + let output = Command::new("gloo") + .arg("-V") + .output() + .expect("Failed to get gloo version."); + + match add_gloo_lib() { + Ok(_) => (), + Err(e) => { + println!("{}", format!("Failed to update gloo-lib: {}", e).red()); + } + } + + let version = String::from_utf8_lossy(&output.stdout); + println!("{}", format!("New version: {}", version).green()); + + Ok(()) +} + +fn main() { + pretty_env_logger::init(); + + let matches = App::new("gloo") + .version(env!("CARGO_PKG_VERSION")) + .author("Gloo ") + .about("Prisma for ML") + .subcommand(App::new("init").about("Initializes your project to use Gloo")) + .subcommand(App::new("build").about("Builds the project")) + .subcommand(App::new("update").about("Updates Gloo")) + .get_matches(); + + match matches.subcommand() { + ("init", Some(init_matches)) => init_command(init_matches), + ("build", Some(build_matches)) => build_command(build_matches), + ("update", Some(_)) => match update_command() { + Ok(_) => { + println!("{}", "Gloo has been successfully updated.".green()); + } + Err(e) => { + println!("{}", e.red()); + return; + } + }, + + _ => { + error!("Invalid command. 
Try `gloo --help` for more information.");
+        }
+    }
+}
diff --git a/cli/src/utils.rs b/cli/src/utils.rs
new file mode 100644
index 000000000..f77cbe2ea
--- /dev/null
+++ b/cli/src/utils.rs
@@ -0,0 +1,69 @@
+use std::fs;
+use std::path::{Path, PathBuf};
+use thiserror::Error;
+
+#[derive(Error, Debug)]
+pub enum DirReadError {
+    #[error("Failed to read directory: {0}")]
+    ReadDirectoryError(PathBuf),
+
+    #[error("Failed to read file: {0}")]
+    ReadFileError(PathBuf),
+
+    #[error("Missing main.gloo: {0}/main.gloo")]
+    MissingTopLevelGlooFile(PathBuf),
+}
+
+impl From<std::io::Error> for DirReadError {
+    fn from(_: std::io::Error) -> Self {
+        DirReadError::ReadDirectoryError(PathBuf::new()) // Default to ReadDirectoryError
+    }
+}
+
+pub fn read_directory(directory: &str) -> Result<Vec<(String, String)>, DirReadError> {
+    let dir_path = Path::new(directory);
+
+    if !dir_path.exists() {
+        return Err(DirReadError::ReadDirectoryError(dir_path.to_path_buf()));
+    }
+
+    // Check if root.gloo exists in the root directory
+    if !dir_path.join("main.gloo").exists() {
+        return Err(DirReadError::MissingTopLevelGlooFile(
+            dir_path.to_path_buf(),
+        ));
+    }
+
+    let mut map = Vec::new();
+    traverse_directory(dir_path, dir_path, &mut map)?;
+    Ok(map)
+}
+
+fn traverse_directory(
+    base_dir: &Path,
+    current_dir: &Path,
+    map: &mut Vec<(String, String)>,
+) -> Result<(), DirReadError> {
+    let entries = fs::read_dir(current_dir)
+        .map_err(|_| DirReadError::ReadDirectoryError(current_dir.to_path_buf()))?;
+
+    for entry in entries {
+        let entry =
+            entry.map_err(|_| DirReadError::ReadDirectoryError(current_dir.to_path_buf()))?;
+        let path = entry.path();
+
+        if path.is_dir() {
+            traverse_directory(base_dir, &path, map)?;
+        } else if path.extension().map_or(false, |ext| ext == "gloo") {
+            let content =
+                fs::read_to_string(&path).map_err(|_| DirReadError::ReadFileError(path.clone()))?;
+            let relative_path = path
+                .strip_prefix(base_dir)
+                .map(|p| p.to_string_lossy().into_owned())
+                .unwrap_or_else(|_|
path.display().to_string()); + map.push((relative_path, content)); + } + } + + Ok(()) +} diff --git a/client-tests/run.sh b/client-tests/run.sh new file mode 100644 index 000000000..e69de29bb diff --git a/client-tests/test1/python/.gitignore b/client-tests/test1/python/.gitignore new file mode 100644 index 000000000..7dc1ebda6 --- /dev/null +++ b/client-tests/test1/python/.gitignore @@ -0,0 +1,174 @@ +# Created by https://www.toptal.com/developers/gitignore/api/python +# Edit at https://www.toptal.com/developers/gitignore?templates=python + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
+# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
#.idea/
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+poetry.toml
+
+# ruff
+.ruff_cache/
+
+# LSP config files
+pyrightconfig.json
+
+# End of https://www.toptal.com/developers/gitignore/api/python
\ No newline at end of file
diff --git a/client-tests/test1/python/app/__init__.py b/client-tests/test1/python/app/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/client-tests/test1/python/app/main.py b/client-tests/test1/python/app/main.py
new file mode 100644
index 000000000..293750c89
--- /dev/null
+++ b/client-tests/test1/python/app/main.py
@@ -0,0 +1,34 @@
+import asyncio
+from generated.clients import AZURE_DEFAULT
+from generated.functions import TopicRouter
+from gloo_py import trace
+
+
+@trace()
+async def test_azure_default():
+    response = await AZURE_DEFAULT.run(
+        "customer-service",
+        prompt=[
+            {
+                "role": "system",
+                "content": "Address the users questions to the best of your abilities.",
+            },
+            {"role": "user", "content": "I'm having trouble with my computer."},
+        ],
+    )
+    return response
+
+
+@trace()
+async def call_topic_router():
+    response = await TopicRouter("v1", "I'm having trouble with my computer.")
+    return response
+
+
+@trace()
+async def main():
+    await asyncio.gather(test_azure_default(), call_topic_router())
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/client-tests/test1/python/gloo.yaml b/client-tests/test1/python/gloo.yaml
new file mode 100644
index 000000000..afe105fec
--- /dev/null
+++ b/client-tests/test1/python/gloo.yaml
@@ -0,0 +1,4 @@
+---
+output_dir: "./generated"
+gloo_dir: "./src"
+version: 0.2.3
diff --git a/client-tests/test1/python/pyproject.toml b/client-tests/test1/python/pyproject.toml
new file mode 100644
index 000000000..65a77c35b
--- /dev/null
+++ b/client-tests/test1/python/pyproject.toml
@@ -0,0 +1,15 @@
+[tool.poetry]
+name = "example"
+version = "0.1.0"
+description = ""
+authors =
["Vaibhav Gupta "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.8" +gloo-lib = { path = "../py-gen", develop = true } +mypy = "^1.5.1" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/client-tests/test1/python/src b/client-tests/test1/python/src new file mode 120000 index 000000000..5cd551cf2 --- /dev/null +++ b/client-tests/test1/python/src @@ -0,0 +1 @@ +../src \ No newline at end of file diff --git a/client-tests/test1/src/clients.gloo b/client-tests/test1/src/clients.gloo new file mode 100644 index 000000000..b2f427c29 --- /dev/null +++ b/client-tests/test1/src/clients.gloo @@ -0,0 +1,33 @@ +@client[llm] AZURE_DEFAULT { + @provider openai + @fallback AZURE_YES_NO + model gpt-3.5-turbo + api_key @ENV.OPENAI_API_KEY + request_timeout 45 + max_tokes 400 +} + +@client[llm] AZURE_GPT4 { + @provider openai + @fallback AZURE_DEFAULT + api_key @ENV.OPENAI_API_KEY + model gpt-3.5-turbo + request_timeout 45 + max_tokens 400 +} + +@client[llm] AZURE_YES_NO { + @provider openai + api_key @ENV.OPENAI_API_KEY + model gpt-3.5-turbo + request_timeout 45 + max_tokens 400 +} + +@client[llm] LARGE_RESPONSE { + @provider openai + api_key @ENV.OPENAI_API_KEY + model gpt-3.5-turbo + request_timeout 45 + max_tokens 400 +} diff --git a/client-tests/test1/src/complaints_pipeline.gloo b/client-tests/test1/src/complaints_pipeline.gloo new file mode 100644 index 000000000..e69de29bb diff --git a/client-tests/test1/src/main.gloo b/client-tests/test1/src/main.gloo new file mode 100644 index 000000000..78180700c --- /dev/null +++ b/client-tests/test1/src/main.gloo @@ -0,0 +1,50 @@ +@enum MessageSender { + AI + RESIDENT +} + +@class Message { + sender MessageSender + body string + + @method as_str { + @lang[py] { + @property + def as_str(self) -> str: + return f'{self.sender}: {self.body}' + } + } +} + +@class Conversation { + thread Message[] + + @method as_str { + @lang[py] { + @property + def as_str(self) -> str: + 
history = '\n'.join(map(lambda m: m.as_str, self.thread)) + return f"Conversation:\n{history}" + } + } + + + @method most_recent_message { + @lang[py] { + @property + def most_recent_message(self) -> str: + if len(self.thread) <= 0: return "" + return self.thread[-1].as_str + } + } + + @method historical_messages { + @lang[py] { + @property + def historical_messages(self) -> str: + if len(self.thread) <= 1: return "" + history = '\n'.join(map(lambda m: m.as_str, self.thread[:-1])) + return f"Prior Conversation:\n{history}" + } + } +} diff --git a/client-tests/test1/src/main_pipeline/message_simplifier.gloo b/client-tests/test1/src/main_pipeline/message_simplifier.gloo new file mode 100644 index 000000000..aa7c5df3d --- /dev/null +++ b/client-tests/test1/src/main_pipeline/message_simplifier.gloo @@ -0,0 +1,91 @@ +@function MessageSimplifier { + @input Conversation + @output string +} + +@test_group group1 for MessageSimplifier { + @case single_message { + @input { + Conversation(thread=[ + Message( + sender=MessageSender.AI, + body='Hi, how can i help you today?' + ) + ]) + } + } + @input { + Conversation(thread=[ + Message( + sender=MessageSender.AI, + body='Hi, how can i help you today?' + ), + Message( + sender=MessageSender.RESIDENT, + body="I'm having an issue with my water system" + ), + ]) + } + @case double_message { + @input { + Conversation(thread=[ + Message( + sender=MessageSender.RESIDENT, + body="I'm having an issue with my water system" + ), + Message( + sender=MessageSender.AI, + body="Oh no! I've reported the issue to the manager." + ), + Message( + sender=MessageSender.RESIDENT, + body="Thanks!" + ), + Message( + sender=MessageSender.RESIDENT, + body="I want to build a garden. Are there any rules that apply?" 
+ ), + ]) + } + } +} + +@variant[llm] v1 for MessageSimplifier { + @client[llm] AZURE_DEFAULT + + @method custom_vars { + @lang[py] { + def custom_vars() -> typing.Dict[str, str]: + return { + "example_1": """\ + Prior Conversations: + Human: What are the vehicles I have registered? + AI: You have a white toyota prius 2015. + + Most Recent Message: + Human: My Neighbor is an ass. + + Simplified message: + Human: I'd like to file a complaint about my neighbor + """ + } + } + } + + @prompt { + Given a chat conversation between a human and ai + simplify the most recent message from the human into a single sentence that includes all prior relevant context. Don't include any previously answered questions. + + {@example_1} + {@input.historical_messages} + + Most Recent Message: + {@input.most_recent_message} + + Simplified message: + Human: + } +} + +@variant[code] v2 for MessageSimplifier { +} diff --git a/client-tests/test1/src/main_pipeline/text_polisher.gloo b/client-tests/test1/src/main_pipeline/text_polisher.gloo new file mode 100644 index 000000000..54a4949fc --- /dev/null +++ b/client-tests/test1/src/main_pipeline/text_polisher.gloo @@ -0,0 +1,65 @@ +@class ProposedMessage { + thread Conversation + generated_response string +} + +@class ImprovedResponse { + should_improve bool + improved_response string? 
+} + +@function MaybePolishText { + @input ProposedMessage + @output ImprovedResponse +} + +@function TextPolisher { + @input ProposedMessage + @output string +} + +@variant[code] v1 for TextPolisher { + @depends_on { + MaybePolishText + } + @method impl { + @lang[py] { + async def impl(input: InputType) -> OutputType: + if len(input.thread.thread) > 1: + try: + res = await MaybePolishText('v1_AZURE_DEFAULT', input) + if res.should_improve and res.improved_response: + return res.improved_response + except: + pass + return input.generated_response + } + } +} + +@variant[llm] v1 for MaybePolishText { + @client[llm] AZURE_GPT4 AZURE_DEFAULT + + @stringify ImprovedResponse { + should_improve @describe{false if the response is already contextual and pleasant} + improved_response @describe{string if should_improve else null} + } + + @prompt { + Given a conversation with a resident, consider improving the response previously shown. + + Good responses are amiable and direct. + + Do not use affirmative or negative unless the question is a yes or no question. 
+
+    Thread until now:
+    {@input.thread.as_str}
+
+    Previous Response: {@input.generated_response}
+
+    Output JSON:
+    {@ImprovedResponse.json}
+
+    JSON:
+  }
+}
diff --git a/client-tests/test1/src/main_pipeline/topic_router.gloo b/client-tests/test1/src/main_pipeline/topic_router.gloo
new file mode 100644
index 000000000..abd1d1420
--- /dev/null
+++ b/client-tests/test1/src/main_pipeline/topic_router.gloo
@@ -0,0 +1,59 @@
+@enum Topic {
+    ANSWERING_SYSTEM
+    ACCOUNT_BALANCE
+    VEHICLE_REGISTRATION
+    VIOLATIONS
+    PAYMENT_HISTORY
+    PAYMENT_METHOD
+    REQUESTED_ESCALATION
+    UPDATE_CONTACT_INFO
+    MAINTENANCE_REQUEST
+    LANDSCAPING_REQUEST
+    COMPLAINTS
+    NO_REPLY
+    OTHER
+}
+
+@function TopicRouter {
+    @input string
+    @output Topic
+}
+
+@test_group AnsweringSystem for TopicRouter {
+    @method validate {
+        @lang[py] {
+            def validate(input: InputType, output: OutputType):
+                assert output == Topic.ANSWERING_SYSTEM
+        }
+    }
+    @input What's the pet policy?
+    @input Am I allowed to have a 70-pound rottweiler?
+    @input How do I pay my dues?
+    @input How do I pay my assessments?
+}
+
+@test_group AccountBalance for TopicRouter {
+    @method validate {
+        @lang[py] {
+            def validate(input: InputType, output: OutputType):
+                assert output == Topic.ACCOUNT_BALANCE
+        }
+    }
+    @input What is my balance
+    @input what is my account balance?
+    @input I just wanna know how much do I have left
+}
+
+@test_group VehicleRegistration for TopicRouter {
+    @method validate {
+        @lang[py] {
+            def validate(input: InputType, output: OutputType):
+                assert output == Topic.VEHICLE_REGISTRATION
+        }
+    }
+    @input What vehicles do I own?
+    @input Are my vehicles on record with the HOA?
+    @input Can you confirm the vehicles I've registered with the association?
+    @input How many and which vehicles do I have listed with the community?
+    @input Can you tell me what vehicles I have registered with the HOA?
+} diff --git a/client-tests/test1/src/main_pipeline/topic_router_variant.gloo b/client-tests/test1/src/main_pipeline/topic_router_variant.gloo new file mode 100644 index 000000000..19c563eb2 --- /dev/null +++ b/client-tests/test1/src/main_pipeline/topic_router_variant.gloo @@ -0,0 +1,65 @@ +@variant[llm] v1 for TopicRouter { + @client[llm] AZURE_DEFAULT + + @stringify Topic { + ANSWERING_SYSTEM @rename{SEARCH_COMMUNITY_DOCS} + @describe{questions about the resident portal, office information, office hours, account status, rent payment information, fees to be paid on time, renovations and alterations, landscaping, resident maintenance responsibilities, emergencies, pets, roommates, moving out, security deposits, the resident's lease, noise levels, utilities, cable, pool, amenities, key fobs, and general parking policy} + + ACCOUNT_BALANCE + @describe{questions about account balance, current balance, or balance. Input may be a partial or fully formed question} + + VEHICLE_REGISTRATION + @describe{questions about a resident's vehicles, cars, parking pass details, or anything related to their own methods of transportation} + + VIOLATIONS @skip + PAYMENT_HISTORY @skip + PAYMENT_METHOD @skip + + REQUESTED_ESCALATION @rename{MANAGER_REQUESTED} + @describe{when the user has requested to speak to a manager} + + UPDATE_CONTACT_INFO + @describe{When the user has requested to update their address or contact information} + + MAINTENANCE_REQUEST @rename{MAINTENANCE_ISSUES} + @describe{complaints related to common area maintenance, gate issues, clubhouse issues, pool issues, road issues, or anything related to the maintenance of the resident's property} + + LANDSCAPING_REQUEST @rename{LANDSCAPING_ISSUES} + @describe{complaints related to adding, removing, or changing the lawn, shrubbery, weeds, or any other plants} + + COMPLAINTS @rename{RESIDENT_ISSUES} + @describe{complaints related to neighbors, parking or noise} + + NO_REPLY + @describe{questions/messages that do not require 
a reply (thank you's, hi's, etc.)} + + OTHER + @describe{questions that do not fit into any of the other categories} + } + + @prompt { + Given the user query, determine the output classification so I can route them to the right context. + + ResidentTopic: + {@Topic.values} + + + INPUT: {@input} + + OUTPUT: + { + "reasoning": "clues ", + "topic": ResidentTopic, + "why": string + } + + JSON: + } + + @method parser_middleware { + @lang[py] { + def parser_middleware(llm_response: str) -> str: + return json.loads(llm_response).get('topic') + } + } +} diff --git a/client-tests/test1/src/maintence_pipeline.gloo b/client-tests/test1/src/maintence_pipeline.gloo new file mode 100644 index 000000000..0519ecba6 --- /dev/null +++ b/client-tests/test1/src/maintence_pipeline.gloo @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/clients/python/.gitignore b/clients/python/.gitignore new file mode 100644 index 000000000..8b25435b8 --- /dev/null +++ b/clients/python/.gitignore @@ -0,0 +1,4 @@ +__pycache__ + +dist/ +.venv/ \ No newline at end of file diff --git a/clients/python/.python-version b/clients/python/.python-version new file mode 100644 index 000000000..cc1923a40 --- /dev/null +++ b/clients/python/.python-version @@ -0,0 +1 @@ +3.8 diff --git a/clients/python/gloo_internal/__init__.py b/clients/python/gloo_internal/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/clients/python/gloo_internal/api.py b/clients/python/gloo_internal/api.py new file mode 100644 index 000000000..89b65d22e --- /dev/null +++ b/clients/python/gloo_internal/api.py @@ -0,0 +1,247 @@ +from __future__ import annotations +import atexit +import datetime + +import http +import typing + +import aiohttp +import pydantic +import requests + +from . 
import api_types +from .env import ENV +from .logging import logger + +T = typing.TypeVar("T", bound=pydantic.BaseModel) +U = typing.TypeVar("U", bound=pydantic.BaseModel) + + +class _APIWrapper: + def __init__(self) -> None: + self.__base_url: None | str = None + self.__project_id: None | str = None + self.__headers: None | typing.Dict[str, str] = None + + @property + def base_url(self) -> str: + if self.__base_url is None: + try: + self.__base_url = ENV.GLOO_BASE_URL + except Exception: + self.__base_url = "https://app.trygloo.com/api" + return self.__base_url + + @property + def project_id(self) -> str: + if self.__project_id is None: + try: + self.__project_id = ENV.GLOO_APP_ID + except Exception: + self.__project_id = "" + return self.__project_id + + @property + def key(self) -> str | None: + try: + return ENV.GLOO_APP_SECRET + except Exception: + return None + + @property + def headers(self) -> typing.Dict[str, str]: + if self.__headers is None: + self.__headers = { + "Content-Type": "application/json", + } + if self.key: + self.__headers["Authorization"] = f"Bearer {self.key}" + return self.__headers + + def _call_api_sync( + self, endpoint: str, payload: T, parser: typing.Type[U] | None = None + ) -> U | None: + data = payload.model_dump(by_alias=True) + response = requests.post( + f"{self.base_url}/{endpoint}", json=data, headers=self.headers + ) + if response.status_code != http.HTTPStatus.OK: + text = response.text + raise Exception(f"Failed with status code {response.status_code}: {text}") + if parser: + return parser.model_validate_json(response.text) + else: + return None + + async def _call_api( + self, endpoint: str, payload: T, parser: typing.Type[U] | None = None + ) -> U | None: + async with aiohttp.ClientSession() as session: + data = payload.model_dump(by_alias=True) + async with session.post( + f"{self.base_url}/{endpoint}", headers=self.headers, json=data + ) as response: + if response.status != 200: + text = await response.text() + raise 
Exception( + f"Failed with status code {response.status}: {text}" + ) + if parser: + return parser.model_validate_json(await response.text()) + else: + return None + + +class __APIBase: + def __init__(self, *, base: _APIWrapper) -> None: + self.__base = base + + @property + def project_id(self) -> str: + return self.__base.project_id + + def _call_api_sync( + self, endpoint: str, payload: T, parser: typing.Type[U] | None = None + ) -> U | None: + return self.__base._call_api_sync(endpoint, payload, parser) + + async def _call_api( + self, endpoint: str, payload: T, parser: typing.Type[U] | None = None + ) -> U | None: + return await self.__base._call_api(endpoint, payload, parser) + + +class TestingAPIWrapper(__APIBase): + def __init__(self, base: _APIWrapper) -> None: + super().__init__(base=base) + + async def create_session(self) -> None: + if not self.project_id: + logger.warning("GLOO_APP_ID not set, dropping log.") + return + + response = await self._call_api( + "tests/create-cycle", + api_types.CreateCycleRequest( + project_id=self.project_id, session_id=ENV.GLOO_PROCESS_ID + ), + api_types.CreateCycleResponse, + ) + if response: + logger.info(f"\033[94mSee test results at: {response.dashboard_url}\033[0m") + + async def create_cases(self, *, payload: api_types.CreateTestCase) -> None: + if not self.project_id: + logger.warning("GLOO_APP_ID not set, dropping log.") + return + + payload.project_id = self.project_id + payload.test_cycle_id = ENV.GLOO_PROCESS_ID + await self._call_api("tests/create-case", payload=payload) + + async def update_case(self, *, payload: api_types.UpdateTestCase) -> None: + if not self.project_id: + logger.warning("GLOO_APP_ID not set, dropping log.") + return + + payload.project_id = self.project_id + payload.test_cycle_id = ENV.GLOO_PROCESS_ID + await self._call_api("tests/update", payload=payload) + + def update_case_sync(self, *, payload: api_types.UpdateTestCase) -> None: + if not self.project_id: + logger.warning("GLOO_APP_ID 
not set, dropping log.") + return + + payload.project_id = self.project_id + payload.test_cycle_id = ENV.GLOO_PROCESS_ID + self._call_api_sync("tests/update", payload=payload) + + +class ProcessAPIWrapper(__APIBase): + def __init__(self, base: _APIWrapper) -> None: + super().__init__(base=base) + + def start(self) -> None: + if not self.project_id: + logger.warning("GLOO_APP_ID not set, dropping log.") + return + + response = self._call_api_sync( + "process/start", + api_types.StartProcessRequest( + project_id=self.project_id, + session_id=ENV.GLOO_PROCESS_ID, + stage=ENV.GLOO_STAGE, + hostname=ENV.GLOO_HOSTNAME, + start_time=datetime.datetime.utcnow().isoformat() + "Z", + tags={ + # TODO: Get git information (e.g. what branch we're on) + }, + ), + api_types.CreateCycleResponse, + ) + if response: + logger.info(f"\033[94mSee test results at: {response.dashboard_url}\033[0m") + + def end(self) -> None: + if not self.project_id: + logger.warning("GLOO_APP_ID not set, dropping log.") + return + + self._call_api_sync( + "process/end", + api_types.EndProcessRequest( + project_id=self.project_id, + session_id=ENV.GLOO_PROCESS_ID, + end_time=datetime.datetime.utcnow().isoformat() + "Z", + ), + ) + + +class APIWrapper(__APIBase): + def __init__(self) -> None: + wrapper = _APIWrapper() + super().__init__(base=wrapper) + self.test = TestingAPIWrapper(base=wrapper) + self.process = ProcessAPIWrapper(base=wrapper) + + async def check_cache( + self, *, payload: api_types.CacheRequest + ) -> api_types.CacheResponse | None: + if not (ENV.GLOO_STAGE == "test" or ENV.GLOO_CACHE == "1"): + # logger.warning("Caching not enabled. 
SET GLOO_CACHE=1 to enable.") + return None + + if not self.project_id: + return None + + payload.project_id = self.project_id + try: + return await self._call_api("cache", payload, api_types.CacheResponse) + except Exception: + return None + + async def log( + self, + *, + payload: api_types.LogSchema, + ) -> None: + if not self.project_id: + logger.warning("GLOO_APP_ID not set, dropping log.") + return + + try: + payload.project_id = self.project_id + await self._call_api("log/v2", payload) + except Exception as e: + event_name = payload.context.event_chain[-1].function_name + if payload.context.event_chain[-1].variant_name: + event_name = ( + f"{event_name}::{payload.context.event_chain[-1].variant_name}" + ) + logger.warning(f"Log failure on {event_name}: {e}") + logger.debug(f"Dropped Payload: {payload}") + + +API = APIWrapper() diff --git a/clients/python/gloo_internal/api_types.py b/clients/python/gloo_internal/api_types.py new file mode 100644 index 000000000..cd3c51365 --- /dev/null +++ b/clients/python/gloo_internal/api_types.py @@ -0,0 +1,167 @@ +from __future__ import annotations +from typing import Any, Dict, List, Mapping, Optional, Union +from typing_extensions import TypedDict, Literal + + +from pydantic import BaseModel, Field +from enum import Enum + + +class TypeSchema(BaseModel): + name: str + fields: Any + + +class IOValue(BaseModel): + value: Any + type: TypeSchema + + +class IO(BaseModel): + input: Optional[IOValue] + output: Optional[IOValue] + + +class LLMOutputModelMetadata(BaseModel): + logprobs: Optional[Any] + prompt_tokens: Optional[int] + output_tokens: Optional[int] + total_tokens: Optional[int] + + +class LLMOutputModel(BaseModel): + raw_text: str + metadata: LLMOutputModelMetadata + + +class LLMChat(TypedDict): + role: Literal["assistant", "user", "system"] + content: str + + +class LLMEventInputPrompt(BaseModel): + template: Union[str, List[LLMChat]] + template_args: Dict[str, str] + + +class LLMEventInput(BaseModel): + prompt: 
LLMEventInputPrompt + invocation_params: Dict[str, Any] + + +class LLMEventSchema(BaseModel): + mdl_name: str = Field(alias="model_name") + provider: str + input: LLMEventInput + output: Optional[LLMOutputModel] + + +class EventChain(BaseModel): + function_name: str + variant_name: Optional[str] + + +class LogSchemaContext(BaseModel): + hostname: str + process_id: str + stage: Optional[str] + latency_ms: Optional[int] + start_time: str + tags: Dict[str, str] + event_chain: List[EventChain] + + +class Error(BaseModel): + code: int + message: str + traceback: Optional[str] + + +MetadataType = LLMEventSchema + + +class LogSchema(BaseModel): + project_id: str + event_type: Literal["log", "func_llm", "func_prob", "func_code"] + root_event_id: str + event_id: str + parent_event_id: Optional[str] + context: LogSchemaContext + io: IO + error: Optional[Error] + metadata: Optional[MetadataType] + + +## Process management +class StartProcessRequest(BaseModel): + project_id: str + session_id: str + stage: str + hostname: str + start_time: str + tags: Mapping[str, str] + + +class EndProcessRequest(BaseModel): + project_id: str + session_id: str + end_time: str + + +### Tests +class CreateCycleRequest(BaseModel): + project_id: str + session_id: str + + +class CreateCycleResponse(BaseModel): + test_cycle_id: str + dashboard_url: str + + +class LogTestTags(BaseModel): + test_cycle_id: str + test_dataset_name: str + test_case_name: str + test_case_arg_name: str + + +class TestCaseStatus(str, Enum): + QUEUED = "QUEUED" + RUNNING = "RUNNING" + PASSED = "PASSED" + FAILED = "FAILED" + CANCELLED = "CANCELLED" + EXPECTED_FAILURE = "EXPECTED_FAILURE" + + +class CreateTestCase(BaseModel): + project_id: str = "" + test_cycle_id: str = "" + test_dataset_name: str + test_name: str + test_case_args: List[Dict[str, str]] + + +class UpdateTestCase(BaseModel): + project_id: str = "" + test_cycle_id: str = "" + test_dataset_name: str + test_case_definition_name: str + test_case_arg_name: str + 
status: TestCaseStatus + error_data: Optional[Any] + + +class CacheRequest(BaseModel): + project_id: str = "" + provider: str + prompt: Union[str, List[LLMChat]] + prompt_vars: Dict[str, str] + invocation_params: Dict[str, Any] + + +class CacheResponse(BaseModel): + mdl_name: str = Field(alias="model_name") + llm_output: LLMOutputModel + latency_ms: int diff --git a/clients/python/gloo_internal/common.py b/clients/python/gloo_internal/common.py new file mode 100644 index 000000000..1cbefd759 --- /dev/null +++ b/clients/python/gloo_internal/common.py @@ -0,0 +1,21 @@ +from __future__ import annotations +from datetime import datetime +import uuid + + +# Define the named tuple 'Event' +class EventBase: + func_name: str + variant_name: str | None + timestamp: datetime + event_id: str + parent_event_id: str | None + + def __init__( + self, *, func_name: str, variant_name: str | None, parent_event_id: str | None + ): + self.func_name = func_name + self.variant_name = variant_name + self.event_id = str(uuid.uuid4()) + self.timestamp = datetime.utcnow() + self.parent_event_id = parent_event_id diff --git a/clients/python/gloo_internal/context_manager.py b/clients/python/gloo_internal/context_manager.py new file mode 100644 index 000000000..c1d240710 --- /dev/null +++ b/clients/python/gloo_internal/context_manager.py @@ -0,0 +1,96 @@ +import abc +from textwrap import dedent +import typing +from .llm_client import LLMClient +from .tracer import trace + +InputType = typing.TypeVar("InputType") +OutputType = typing.TypeVar("OutputType") + +T = typing.TypeVar("T") + + +class GlooVariant(typing.Generic[InputType, OutputType]): + __func_name: str + __name: str + + def __init__(self, *, func_name: str, name: str): + self.__func_name = func_name + self.__name = name + + @property + def name(self) -> str: + return self.__name + + @property + def func_name(self) -> str: + return self.__func_name + + @abc.abstractmethod + async def _run(self, arg: InputType) -> OutputType: + raise 
NotImplementedError + + async def run(self, arg: InputType) -> OutputType: + response = await trace(_name=self.func_name, _tags={"__variant": self.name})( + self._run + )(arg) + return response + + +class CodeVariant(GlooVariant[InputType, OutputType]): + __func: typing.Callable[[InputType], typing.Awaitable[OutputType]] + + def __init__( + self, + func_name: str, + name: str, + *, + func: typing.Callable[[InputType], typing.Awaitable[OutputType]], + ): + super().__init__(func_name=func_name, name=name) + self.__func = func + + async def _run(self, arg: InputType) -> OutputType: + return await self.__func(arg) + + +class LLMVariant(GlooVariant[InputType, OutputType]): + __prompt: str + __client: LLMClient + + def __init__( + self, + func_name: str, + name: str, + *, + prompt: str, + client: LLMClient, + prompt_vars: typing.Callable[ + [InputType], typing.Awaitable[typing.Dict[str, str]] + ], + parser: typing.Callable[[str], typing.Awaitable[OutputType]], + ): + super().__init__(func_name=func_name, name=name) + self.__prompt = prompt + self.__client = client + self.__prompt_vars = prompt_vars + self.__parser = parser + + async def _run(self, arg: InputType) -> OutputType: + prompt_vars = await self.__prompt_vars(arg) + + # Determine which prompt vars are used in the prompt string. 
+ # format is {@var_name} + used_vars = set() + for var_name in prompt_vars: + if f"{{@{var_name}}}" in self.__prompt: + used_vars.add(var_name) + + # If there are unused vars, log a warning + prompt_vars_copy = { + var_name: dedent(prompt_vars[var_name].lstrip("\n").rstrip()) + for var_name in used_vars + } + + response = await self.__client._run(self.__prompt, vars=prompt_vars_copy) + return await self.__parser(response) diff --git a/clients/python/gloo_internal/env.py b/clients/python/gloo_internal/env.py new file mode 100644 index 000000000..72b409653 --- /dev/null +++ b/clients/python/gloo_internal/env.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +import os +import typing +import uuid +import platform +import dotenv + +dotenv.load_dotenv(dotenv_path=dotenv.find_dotenv(usecwd=True)) + + +class EnvVars: + __var_list: typing.Dict[str, None | str] + + def __init__(self, var_list: typing.List[typing.Tuple[str, str] | str]): + # List of environment variables you're interested in. + # This can be predefined or passed during instantiation. + self.__dict__["__var_list"] = { + (var if isinstance(var, str) else var[0]): None + if isinstance(var, str) + else var[1] + for var in var_list + } + + @property + def var_list(self) -> typing.Dict[str, None | str]: + """Get the list of environment variables.""" + return typing.cast( + typing.Dict[str, typing.Optional[str]], self.__dict__["__var_list"] + ) + + def __getattr__(self, key: str) -> str: + """Get the value of an environment variable when accessed as an attribute.""" + default_val = self.var_list.get(key, None) + val = os.environ.get(key, default_val) + if val is None: + raise ValueError(f"'{key}' must be set via CLI.") + return val + + def __setattr__(self, key: str, value: str) -> None: + """Set the value of an environment variable when accessed as an attribute.""" + assert isinstance(key, str), f"{key} must be a string." + assert isinstance(value, str), f"{key}: {value} must be a string." 
+
+        # BUG FIX: `self.__var_list` name-mangles to `_EnvVars__var_list`, which is
+        # not the `__dict__["__var_list"]` key set in __init__; the lookup fell through
+        # to __getattr__ and raised ValueError on EVERY assignment. Use the property.
+        if key in self.var_list:
+            os.environ[key] = value
+        else:
+            raise ValueError(f"'{key}' must be set via CLI.")
+
+    def list_all(self) -> typing.Dict[str, None | str]:
+        """List all environment variables specified in the var_list."""
+        return {key: os.environ.get(key, None) for key in self.__dict__["__var_list"]}
+
+    def __str__(self) -> str:
+        """String representation of the environment variables."""
+        return str(self.list_all())
+
+
+ENV = EnvVars(
+    var_list=[
+        "GLOO_BASE_URL",
+        ("GLOO_PROCESS_ID", str(uuid.uuid4())),
+        ("HOSTNAME", platform.node()),
+        "GLOO_APP_ID",
+        "GLOO_APP_SECRET",
+        "OPENAI_API_KEY",
+        ("GLOO_CAPTURE_CODE", "false"),
+        ("GLOO_STAGE", "prod"),
+        ("GLOO_CACHE", "0"),
+    ]
+)
+
+__all__ = ["ENV"]
diff --git a/clients/python/gloo_internal/llm_client.py b/clients/python/gloo_internal/llm_client.py
new file mode 100644
index 000000000..b8856de4b
--- /dev/null
+++ b/clients/python/gloo_internal/llm_client.py
@@ -0,0 +1,9 @@
+from .llm_clients.base_client import LLMClient
+from .llm_clients.openai_client import OpenAILLMClient
+from .llm_clients.anthropic_client import AnthropicLLMClient
+
+__all__ = [
+    "LLMClient",
+    "OpenAILLMClient",
+    "AnthropicLLMClient",
+]
diff --git a/clients/python/gloo_internal/llm_clients/__init__.py b/clients/python/gloo_internal/llm_clients/__init__.py
new file mode 100644
index 000000000..069c8e57a
--- /dev/null
+++ b/clients/python/gloo_internal/llm_clients/__init__.py
@@ -0,0 +1,8 @@
+from .base_client import LLMClient
+from .factory import register_llm_client, llm_client_factory
+
+__all__ = [
+    "LLMClient",
+    "register_llm_client",
+    "llm_client_factory",
+]
diff --git a/clients/python/gloo_internal/llm_clients/anthropic_client.py b/clients/python/gloo_internal/llm_clients/anthropic_client.py
new file mode 100644
index 000000000..e516f8e90
--- /dev/null
+++ b/clients/python/gloo_internal/llm_clients/anthropic_client.py
@@ -0,0 +1,119 @@
+from __future__ import annotations
+import typing
+import anthropic
+ +from .base_client import LLMClient +from .factory import register_llm_client +from .. import api_types + + +@register_llm_client("anthropic") +class AnthropicLLMClient(LLMClient): + def __init__(self, provider: str, **kwargs: typing.Any) -> None: + if "max_tokens_to_sample" not in kwargs: + kwargs["max_tokens_to_sample"] = 300 + if "model" not in kwargs: + assert False, "AnthropicLLMClient requires a model" + + if "max_retries" in kwargs and "__retry" in kwargs: + assert False, "Cannot specify both max_retries and __retry" + + __retry = kwargs.pop("__retry", kwargs.pop("max_retries", 0)) + super().__init__(provider=provider, **kwargs, __retry=__retry) + + client_kwargs = {} + if "api_key" in kwargs: + client_kwargs["api_key"] = kwargs.pop("api_key") + if "auth_token" in kwargs: + client_kwargs["auth_token"] = kwargs.pop("auth_token") + if "base_url" in kwargs: + client_kwargs["base_url"] = kwargs.pop("base_url") + if "timeout" in kwargs: + client_kwargs["timeout"] = kwargs.pop("timeout") + if "default_headers" in kwargs: + client_kwargs["default_headers"] = kwargs.pop("default_headers") + if "default_query" in kwargs: + client_kwargs["default_query"] = kwargs.pop("default_query") + if "transport" in kwargs: + client_kwargs["transport"] = kwargs.pop("transport") + if "proxies" in kwargs: + client_kwargs["proxies"] = kwargs.pop("proxies") + if "connection_pool_limits" in kwargs: + client_kwargs["connection_pool_limits"] = kwargs.pop( + "connection_pool_limits" + ) + if "_strict_response_validation" in kwargs: + client_kwargs["_strict_response_validation"] = kwargs.pop( + "_strict_response_validation" + ) + self.__call_args = kwargs + + self.__client = anthropic.AsyncAnthropic(**client_kwargs, max_retries=0) + + def get_model_name(self) -> str: + # Try some well known keys + return typing.cast(str, self.kwargs["model"]) + + def _exception_to_code(self, e: BaseException) -> int | None: + if isinstance(e, anthropic.APIStatusError): + return e.status_code + return 
None + + async def _run_chat( + self, chats: typing.List[api_types.LLMChat] + ) -> typing.Tuple[str, api_types.LLMOutputModel]: + messages = "".join( + [ + (anthropic.HUMAN_PROMPT if c["role"] == "user" else anthropic.AI_PROMPT) + + " " + + c["content"] + for c in chats + ] + ) + aprompt_tokens = self.__client.count_tokens(messages) + response: anthropic.types.Completion = await self.__client.completions.create( + prompt=f"{messages}{anthropic.AI_PROMPT}", + **self.kwargs, + ) + + model = response.model + text = response.completion + + output_tokens = await self.__client.count_tokens(text) + prompt_tokens = await aprompt_tokens + + return model, api_types.LLMOutputModel( + raw_text=text, + metadata=api_types.LLMOutputModelMetadata( + logprobs=None, + prompt_tokens=prompt_tokens, + output_tokens=output_tokens, + total_tokens=output_tokens + prompt_tokens, + ), + ) + + async def _run_completion( + self, prompt: str + ) -> typing.Tuple[str, api_types.LLMOutputModel]: + messages = f"{anthropic.HUMAN_PROMPT} {prompt}{anthropic.AI_PROMPT}" + aprompt_tokens = self.__client.count_tokens(messages) + response: anthropic.types.Completion = await self.__client.completions.create( + prompt=messages, + **self.__call_args, + ) + + model = response.model + text = response.completion + + output_tokens = await self.__client.count_tokens(text) + prompt_tokens = await aprompt_tokens + + return model, api_types.LLMOutputModel( + raw_text=text, + metadata=api_types.LLMOutputModelMetadata( + logprobs=None, + prompt_tokens=prompt_tokens, + output_tokens=output_tokens, + total_tokens=output_tokens + prompt_tokens, + ), + ) diff --git a/clients/python/gloo_internal/llm_clients/base_client.py b/clients/python/gloo_internal/llm_clients/base_client.py new file mode 100644 index 000000000..4d1f2c2b6 --- /dev/null +++ b/clients/python/gloo_internal/llm_clients/base_client.py @@ -0,0 +1,221 @@ +from __future__ import annotations + +import abc +import re +import traceback +import typing +import 
aiohttp + +from ..tracer import set_ctx_error, set_llm_metadata, trace, update_trace_tags, get_ctx +from .. import api_types +from ..api import API +from ..logging import logger + + +def hide_secret(kwargs: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]: + copied = kwargs.copy() + for x in ["api_key", "secret_key", "token", "auth"]: + if x in copied: + copied[x] = copied[x][:4] + "****" + return copied + + +def safe_format(s: str, kwargs: typing.Dict[str, str]) -> str: + for key, value in kwargs.items(): + s = s.replace("{@" + key + "}", value) + # Throw error if there are any remaining placeholders of the form {@key} + if re.search("{@.*?}", s): + raise ValueError(f"Invalid template: {s}") + return s + + +class LLMClient: + def __init__( + self, + provider: str, + __retry: int = 0, + __default_fallback__: typing.Union["LLMClient", None] = None, + __fallback__: typing.Union[typing.Dict[int, "LLMClient"], None] = None, + **kwargs: typing.Any, + ) -> None: + self.__provider = provider + self.__type = str(kwargs.pop("__type", "chat")) + self.__retry = __retry + self.__default_fallback = __default_fallback__ + self.__fallback = __fallback__ + self.__kwargs = kwargs + + @property + def provider(self) -> str: + return self.__provider + + @property + def kwargs(self) -> typing.Dict[str, typing.Any]: + return self.__kwargs + + @property + def type(self) -> str: + return self.__type + + def is_chat(self) -> bool: + return self.type == "chat" + + @abc.abstractmethod + def get_model_name(self) -> str: + raise NotImplementedError + + async def run( + self, + name: str, + *, + prompt: str | typing.List[api_types.LLMChat], + ) -> str: + return await trace(_name=name)(self._run)(prompt_template=prompt) + + async def _run_impl( + self, + prompt_template: str | typing.List[api_types.LLMChat], + vars: typing.Dict[str, str] = {}, + ) -> str: + event = api_types.LLMEventSchema( + provider=self.provider, + model_name=self.get_model_name(), + 
input=api_types.LLMEventInput( + prompt=api_types.LLMEventInputPrompt( + template=prompt_template, template_args=vars + ), + invocation_params=hide_secret(self.kwargs), + ), + output=None, + ) + + if isinstance(prompt_template, list): + if not self.is_chat(): + raise ValueError("Pre/post prompts are only supported for chat models") + + set_llm_metadata(event) + + cached = await API.check_cache( + payload=api_types.CacheRequest( + provider=self.provider, + prompt=prompt_template, + prompt_vars=vars, + invocation_params=event.input.invocation_params, + ) + ) + + if cached: + model_name = cached.mdl_name + response = cached.llm_output + update_trace_tags(__cached="1", __cached_latency_ms=str(cached.latency_ms)) + else: + if self.is_chat(): + if isinstance(prompt_template, list): + chat_prompt: typing.List[api_types.LLMChat] = [ + {"role": x["role"], "content": safe_format(x["content"], vars)} + for x in prompt_template + ] + else: + chat_prompt = [ + { + "role": "user", + "content": safe_format(prompt_template, vars), + } + ] + logger.info(f"Running {self.provider} with prompt:\n{chat_prompt}") + model_name, response = await self._run_chat(chat_prompt) + else: + assert isinstance(prompt_template, str) + prompt = safe_format(prompt_template, vars) + logger.info(f"Running {self.provider} with prompt:\n{prompt}") + model_name, response = await self._run_completion(prompt) + + logger.info(f"RESPONSE:\n{response.raw_text}") + # Update event with output + event.output = response + event.mdl_name = model_name + return response.raw_text + + async def _run( + self, + prompt_template: str | typing.List[api_types.LLMChat], + vars: typing.Dict[str, str] = {}, + *, + __max_tries__: None | int = None, + ) -> str: + max_tries = ( + self.__retry + 1 + if __max_tries__ is None + else min(self.__retry + 1, __max_tries__) + ) + assert max_tries > 0, "max_tries must be positive" + try: + return await self._run_impl(prompt_template, vars) + except BaseException as e: + formatted_traceback 
= "".join( + traceback.format_exception(e.__class__, e, e.__traceback__) + ) + set_ctx_error( + api_types.Error( + # TODO: For GlooErrors, we should have a list of error codes. + code=1, # Unknown error. + message=f"{e.__class__.__name__}: {e}", + traceback=formatted_traceback, + ) + ) + maybe_handler = self._handle_exception(max_tries, e) + if maybe_handler is not None: + handler, name = maybe_handler + stk, ctx = get_ctx() + return await trace( + _name=f"{stk.func}[{name}]", + _tags=dict(**ctx.tags), + )(handler._run)( + prompt_template, + vars, + __max_tries__=max_tries - 1 if handler is self else None, + ) + raise e + + def _handle_exception( + self, max_tries: int, e: BaseException + ) -> typing.Optional[typing.Tuple["LLMClient", str]]: + status_code = self._exception_to_code(e) + if self._allow_retry(status_code): + if max_tries - 1 > 1: + return self, f"retry[{max_tries - 1}]" + if self.__fallback and status_code is not None: + fallback = self.__fallback.get(status_code, None) + if fallback is not None: + return fallback, f"fallback[{status_code}]" + if self.__default_fallback: + # Certain status codes are not retriable by default. 
+ if status_code is None or status_code not in [400, 401, 403, 404, 422]: + return self.__default_fallback, "fallback" + return None + + @abc.abstractmethod + def _allow_retry(self, code: int | None) -> bool: + if code is None: + return True + return code not in [400, 401, 403, 404, 422] + + @abc.abstractmethod + def _exception_to_code(self, e: BaseException) -> typing.Optional[int]: + if isinstance(e, aiohttp.ClientError): + return 500 + if isinstance(e, aiohttp.ClientResponseError): + return e.status + return None + + @abc.abstractmethod + async def _run_completion( + self, prompt: str + ) -> typing.Tuple[str, api_types.LLMOutputModel]: + raise NotImplementedError("Client must implement _run_completion method") + + @abc.abstractmethod + async def _run_chat( + self, chats: typing.List[api_types.LLMChat] + ) -> typing.Tuple[str, api_types.LLMOutputModel]: + raise NotImplementedError("Client must implement _run_chat method") diff --git a/clients/python/gloo_internal/llm_clients/factory.py b/clients/python/gloo_internal/llm_clients/factory.py new file mode 100644 index 000000000..df1f9f0b2 --- /dev/null +++ b/clients/python/gloo_internal/llm_clients/factory.py @@ -0,0 +1,54 @@ +from __future__ import annotations +import typing +from .base_client import LLMClient + + +class LLMClientRegistry: + _registry: typing.Dict[str, typing.Type[LLMClient]] = {} + + @classmethod + def register( + cls, provider: str | typing.List[str] + ) -> typing.Callable[[typing.Type[LLMClient]], typing.Type[LLMClient]]: + def decorator(sub_cls: typing.Type[LLMClient]) -> typing.Type[LLMClient]: + if not issubclass(sub_cls, LLMClient): + raise TypeError(f"Registered class must inherit from LLMClient") + if isinstance(provider, str): + if provider in cls._registry: + raise ValueError( + f"LLMClient for provider '{provider}' already registered" + ) + cls._registry[provider] = sub_cls + else: + for p in provider: + if p in cls._registry: + raise ValueError( + f"LLMClient for provider '{p}' already 
registered" + ) + cls._registry[p] = sub_cls + return sub_cls + + return decorator + + @classmethod + def create_instance( + cls, + *, + provider: str, + __default_fallback__: typing.Union["LLMClient", None] = None, + __fallback__: typing.Union[typing.Dict[int, "LLMClient"], None] = None, + **kwargs: typing.Any, + ) -> LLMClient: + if provider not in cls._registry: + raise ValueError(f"No LLMClient registered for provider '{provider}'") + client_cls = cls._registry[provider] + return client_cls( + provider=provider, + **kwargs, + __default_fallback__=__default_fallback__, + __fallback__=__fallback__, + ) + + +register_llm_client = LLMClientRegistry.register +llm_client_factory = LLMClientRegistry.create_instance diff --git a/clients/python/gloo_internal/llm_clients/openai_client.py b/clients/python/gloo_internal/llm_clients/openai_client.py new file mode 100644 index 000000000..9604559c1 --- /dev/null +++ b/clients/python/gloo_internal/llm_clients/openai_client.py @@ -0,0 +1,59 @@ +import typing +import openai + +from .base_client import LLMClient +from .factory import register_llm_client +from .. 
import api_types + + +@register_llm_client(["openai", "azure"]) +class OpenAILLMClient(LLMClient): + def __init__(self, provider: str, **kwargs: typing.Any) -> None: + super().__init__(provider=provider, **kwargs) + + def get_model_name(self) -> str: + # Try some well known keys + for key in ["model_name", "model", "engine"]: + if key in self.kwargs: + val = self.kwargs[key] + if isinstance(val, str): + return val.lower() + return "unknown" + + async def _run_chat( + self, chats: typing.List[api_types.LLMChat] + ) -> typing.Tuple[str, api_types.LLMOutputModel]: + assert self.is_chat(), "This method is only for chat models" + + response = await openai.ChatCompletion.acreate(messages=chats, **self.kwargs) # type: ignore + text = response["choices"][0]["message"]["content"] + usage = response["usage"] + model = response["model"] + return model, api_types.LLMOutputModel( + raw_text=text, + metadata=api_types.LLMOutputModelMetadata( + logprobs=None, + prompt_tokens=usage.get("prompt_tokens", None), + output_tokens=usage.get("completion_tokens", None), + total_tokens=usage.get("total_tokens", None), + ), + ) + + async def _run_completion( + self, prompt: str + ) -> typing.Tuple[str, api_types.LLMOutputModel]: + assert not self.is_chat(), "This method is only for completion models" + + response = await openai.Completion.acreate(prompt=prompt, **self.kwargs) # type: ignore + text = response["choices"][0]["text"] + usage = response["usage"] + model = response["model"] + return model, api_types.LLMOutputModel( + raw_text=text, + metadata=api_types.LLMOutputModelMetadata( + logprobs=response["choices"][0]["logprobs"], + prompt_tokens=usage.get("prompt_tokens", None), + output_tokens=usage.get("completion_tokens", None), + total_tokens=usage.get("total_tokens", None), + ), + ) diff --git a/clients/python/gloo_internal/logging.py b/clients/python/gloo_internal/logging.py new file mode 100644 index 000000000..4e91c396e --- /dev/null +++ b/clients/python/gloo_internal/logging.py 
@@ -0,0 +1,32 @@ +# Set up module-specific logging +import logging +import os +import coloredlogs + +logger = logging.getLogger(__name__) +logger.setLevel(os.environ.get("GLOO_LOG_LEVEL", logging.DEBUG)) + +# Custom field styles for coloredlogs +field_styles = { + "asctime": {"color": "green"}, + "hostname": {"color": "magenta"}, + "levelname": {"color": "white", "bold": True}, + "name": {"color": "blue", "bold": True}, + "programname": {"color": "cyan"}, +} +level_styles = { + "info": {"color": "green"}, + "verbose": {"color": "blue"}, + "warning": {"color": "yellow"}, + "error": {"color": "red"}, + "critical": {"color": "red", "bold": True}, +} + +coloredlogs.install( + level="INFO", + logger=logger, + fmt="%(asctime)s - [GLOO] - %(levelname)s: %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + field_styles=field_styles, + level_styles=level_styles, +) diff --git a/clients/python/gloo_internal/py.typed b/clients/python/gloo_internal/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/clients/python/gloo_internal/test_context_manager.py b/clients/python/gloo_internal/test_context_manager.py new file mode 100644 index 000000000..7903c2567 --- /dev/null +++ b/clients/python/gloo_internal/test_context_manager.py @@ -0,0 +1,153 @@ +import asyncio +import random +import pytest +import typing + +from . 
import api_types +from .tracer import trace, update_trace_tags +from .api import API + +from mock import patch, AsyncMock + + +class APIWrapperLogMocker: + def __init__(self, logs_list: typing.List[api_types.LogSchema]) -> None: + self.logs_list = logs_list + + async def __aenter__(self) -> AsyncMock: + self.patcher = patch.object(API, "log", new_callable=AsyncMock) + self.mock_log = self.patcher.start() + self.mock_log.side_effect = lambda payload: self.logs_list.append(payload) + return self.mock_log + + async def __aexit__( + self, exc_type: typing.Any, exc_val: typing.Any, exc_tb: typing.Any + ) -> None: + self.patcher.stop() + + +def validate_log(log: api_types.LogSchema, chain: typing.List[str]) -> None: + assert log.event_type == "func_code" + assert len(log.context.event_chain) == len(chain) + for i, func_name in enumerate(chain): + assert log.context.event_chain[i].function_name == func_name + assert log.context.event_chain[i].variant_name is None + + +@pytest.mark.asyncio +async def test_single_function() -> None: + # Add a mock for APIWrapper.log + + @trace() + async def foo(x: int) -> int: + await asyncio.sleep(0.1) + return x + + logs: typing.List[api_types.LogSchema] = [] + async with APIWrapperLogMocker(logs) as mock_log: + assert await foo(100) == 100 + mock_log.assert_called() + + assert len(logs) == 1 + log = logs[0] + validate_log(log, ["foo"]) + + +@pytest.mark.asyncio +async def test_chained() -> None: + @trace() + async def foo(x: int) -> int: + await asyncio.sleep(0.1 + random.random() / 10) + return x + + @trace() + async def bar(x: typing.List[int]) -> int: + res = await asyncio.gather(*map(foo, x)) + return sum(res) + + logs: typing.List[api_types.LogSchema] = [] + async with APIWrapperLogMocker(logs) as mock_log: + assert await bar([100, 90, 80, 70, 60, 50, 40, 30, 20, 10]) == 550 + mock_log.assert_called() + + assert len(logs) == 11 + values = [] + for i in range(10): + log = logs[i] + validate_log(log, ["bar", "foo"]) + assert 
log.io.input is not None + assert log.io.output is not None + + values.append(log.io.input.value) + assert log.io.input.type.name == "int" + + assert log.io.output.value == values[-1] + assert log.io.output.type.name == "int" + + assert set(values) == {100, 90, 80, 70, 60, 50, 40, 30, 20, 10} + + last_event = logs[-1] + validate_log(last_event, ["bar"]) + assert last_event.io.input is not None + assert last_event.io.output is not None + + assert set(last_event.io.input.value) == set(values) + assert last_event.io.input.type.name == "list" + assert last_event.io.output.value == 550 + + +@pytest.mark.asyncio +async def test_chained_tags() -> None: + @trace() + async def foo(x: int) -> int: + update_trace_tags(second=str(x)) + if x == 50: + update_trace_tags(first="100") + if x == 40: + update_trace_tags(first=None) + await asyncio.sleep(0.1 + random.random() / 10) + return x + + @trace() + async def bar(x: typing.List[int]) -> int: + update_trace_tags(first=str(len(x))) + res = await asyncio.gather(*map(foo, x)) + return sum(res) + + logs: typing.List[api_types.LogSchema] = [] + async with APIWrapperLogMocker(logs) as mock_log: + assert await bar([100, 90, 80, 70, 60, 50, 40, 30, 20, 10]) == 550 + mock_log.assert_called() + + assert len(logs) == 11 + values = [] + for i in range(10): + log = logs[i] + validate_log(log, ["bar", "foo"]) + assert log.io.input is not None + assert log.io.output is not None + + values.append(log.io.input.value) + assert log.io.input.type.name == "int" + + assert log.io.output.value == values[-1] + assert log.io.output.type.name == "int" + + if values[-1] == 50: + assert log.context.tags == {"first": "100", "second": str(values[-1])} + elif values[-1] == 40: + assert log.context.tags == {"second": str(values[-1])} + else: + assert log.context.tags == {"first": "10", "second": str(values[-1])} + + assert set(values) == {100, 90, 80, 70, 60, 50, 40, 30, 20, 10} + + last_event = logs[-1] + validate_log(last_event, ["bar"]) + assert 
last_event.io.input is not None + assert last_event.io.output is not None + + assert set(last_event.io.input.value) == set(values) + assert last_event.io.input.type.name == "list" + assert last_event.io.output.value == 550 + assert last_event.context.tags == {"first": "10"} diff --git a/clients/python/gloo_internal/tracer.py b/clients/python/gloo_internal/tracer.py new file mode 100644 index 000000000..50899b2c1 --- /dev/null +++ b/clients/python/gloo_internal/tracer.py @@ -0,0 +1,405 @@ +from __future__ import annotations + +import functools +import inspect +import traceback +from types import TracebackType +import typing +import asyncio +from datetime import datetime +import uuid +from contextvars import ContextVar + +from . import api_types +from .env import ENV +from .api import API + +current_trace_id: ContextVar[typing.Optional[str]] = ContextVar( + "current_trace_id", default=None +) +first_trace_id: ContextVar[typing.Optional[str]] = ContextVar( + "first_trace_id", default=None +) + + +class TraceStackItem: + def __init__(self, *, _id: str, func: str) -> None: + self.__id = _id + self.__func = func + + @property + def id(self) -> str: + return self.__id + + @property + def func(self) -> str: + return self.__func + + +trace_stack: ContextVar[typing.List[TraceStackItem]] = ContextVar( + "trace_stack", default=[] +) + + +class ContextVarStorage: + llmMetadata: typing.Optional[api_types.LLMEventSchema] = None + + def __init__(self, _id: str, tags: typing.Dict[str, str], io: api_types.IO) -> None: + self._id = _id + self.tags = tags + self.io = io + self.start_time = datetime.utcnow() + self.error: typing.Optional[api_types.Error] = None + + async def emit( + self, + *, + error: typing.Optional[api_types.Error], + io: api_types.IO, + # Always includes self. 
+ call_history: typing.List[TraceStackItem], + ) -> None: + if not call_history or call_history[-1].id != self._id: + raise Exception("Call history is not valid.") + + latency_ms = int((datetime.utcnow() - self.start_time).total_seconds() * 1000) + variant_name = self.tags.pop("__variant", None) + + schema_context = api_types.LogSchemaContext( + start_time=self.start_time.isoformat() + "Z", + hostname=ENV.HOSTNAME, + process_id=ENV.GLOO_PROCESS_ID, + stage=ENV.GLOO_STAGE, + latency_ms=latency_ms, + tags=self.tags, + event_chain=[ + api_types.EventChain(function_name=x.func, variant_name=variant_name) + for x in call_history + ], + ) + payload = api_types.LogSchema( + project_id="", + event_type="func_llm" if self.llmMetadata else "func_code", + event_id=self._id, + parent_event_id=call_history[-2].id if len(call_history) > 1 else None, + root_event_id=call_history[0].id, + context=schema_context, + error=error or self.error, + io=io, + metadata=self.llmMetadata, + ) + + try: + await API.log(payload=payload) + except Exception: + pass + + +context_storage: ContextVar[typing.Dict[str, ContextVarStorage]] = ContextVar( + "context_storage", default={} +) + +T = typing.TypeVar("T", bound=typing.Callable[..., typing.Any]) + + +class TraceContext: + io: api_types.IO + + def __init__( + self, + name: str, + func: T, + args: typing.Tuple[typing.Any, ...], + kwargs: typing.Dict[str, typing.Any], + ) -> None: + self.func_name = name + self.args = args + self.kwargs = kwargs + + self.tags = kwargs.pop("__tags", {}) + # Ensure tags are a Dict[str, str] + if not isinstance(self.tags, dict): + raise Exception( + f"Tags must be a Dict[str, str], got {type(self.tags)} instead." + ) + for key, value in self.tags.items(): + if not isinstance(key, str): + raise Exception(f"Tag keys must be strings, got {type(key)} instead.") + if not isinstance(value, str): + raise Exception( + f"Tag values must be strings, got {type(value)} instead." 
+ ) + + self.uid = str(uuid.uuid4()) + + param_names = list(inspect.signature(func).parameters.keys()) + params = {param_names[i]: arg for i, arg in enumerate(args)} + params.update(kwargs) + + if "self" in params: + params.pop("self") + + if "cls" in params: + params.pop("cls") + + if len(params) == 0: + self.io = api_types.IO(input=None, output=None) + elif len(params) == 1: + _, value = params.popitem() + self.io = api_types.IO( + input=api_types.IOValue( + value=value, + type=api_types.TypeSchema(name=type(value).__name__, fields={}), + ), + output=None, + ) + else: + self.io: api_types.IO = api_types.IO( + input=api_types.IOValue( + value=params, + type=api_types.TypeSchema( + name="Dict[str, str]", + fields={ + name: type(value).__name__ for name, value in params.items() + }, + ), + ), + output=None, + ) + + async def __aenter__(self) -> "TraceContext": + self._enter() + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]], + exc_value: typing.Optional[BaseException], + tb: typing.Optional[TracebackType], + ) -> None: + ctx, chain, error = self._exit(exc_type, exc_value, tb) + await ctx.emit( + io=self.io, + error=error, + call_history=chain, + ) + + def __enter__(self) -> "TraceContext": + self._enter() + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]], + exc_value: typing.Optional[BaseException], + tb: typing.Optional[TracebackType], + ) -> None: + ctx, chain, error = self._exit(exc_type, exc_value, tb) + asyncio.run( + ctx.emit( + io=self.io, + error=error, + call_history=chain, + ) + ) + + def set_output(self, output: typing.Any) -> None: + self.io.output = api_types.IOValue( + value=output, + type=api_types.TypeSchema(name=type(output).__name__, fields={}), + ) + + def _merge_tags(self) -> None: + current_stack = trace_stack.get() + if current_stack: + parent_context_id = current_stack[-1] + parent_context = context_storage.get().get(parent_context_id.id) + if 
parent_context: + parent_tags = parent_context.tags + if self.tags: + merged_tags = parent_tags.copy() + merged_tags.update(self.tags) + self.tags = merged_tags + else: + self.tags = parent_tags + + def _enter(self) -> None: + self._merge_tags() + ctx = ContextVarStorage(self.uid, self.tags, self.io) + + current_stack = trace_stack.get() + stack_item = TraceStackItem(_id=self.uid, func=self.func_name) + if current_trace_id.get() is None: + first_trace_id.set(self.uid) + trace_stack.set([stack_item]) + else: + trace_stack.set(current_stack + [stack_item]) + + current_trace_id.set(self.uid) + context_storage.get()[self.uid] = ctx + + def _exit( + self, + exc_type: typing.Optional[typing.Type[BaseException]], + exc_value: typing.Optional[BaseException], + tb: typing.Optional[TracebackType], + ) -> typing.Tuple[ + ContextVarStorage, typing.List[TraceStackItem], typing.Optional[api_types.Error] + ]: + if exc_type is not None: + formatted_traceback = "".join( + traceback.format_exception(exc_type, exc_value, tb) + ) + error = api_types.Error( + # TODO: For GlooErrors, we should have a list of error codes. + code=1, # Unknown error. + message=f"{exc_type.__name__}: {exc_value}", + traceback=formatted_traceback, + ) + else: + error = None + current_stack = trace_stack.get() + trace_stack.set(current_stack[:-1]) + + context_data = context_storage.get() + ctx = context_data.pop(self.uid, None) + context_storage.set(context_data) + + if ctx is None: + raise Exception("Context not found") + return ctx, current_stack, error + + +def set_llm_metadata(metadata: api_types.LLMEventSchema) -> None: + current_stack = trace_stack.get() + if current_stack: + current_id = current_stack[-1] + current_context = context_storage.get().get(current_id.id) + if current_context: + current_context.llmMetadata = metadata + return + raise Exception( + "No trace context found. Please use set_llm_metadata inside a traced function." 
+ ) + + +def set_ctx_error(error: api_types.Error) -> None: + current_stack = trace_stack.get() + if current_stack: + current_id = current_stack[-1] + current_context = context_storage.get().get(current_id.id) + if current_context: + current_context.error = error + return + raise Exception( + "No trace context found. Please use set_ctx_error inside a traced function." + ) + + +def get_ctx() -> typing.Tuple[TraceStackItem, ContextVarStorage]: + current_stack = trace_stack.get() + if current_stack: + current_id = current_stack[-1] + current_context = context_storage.get().get(current_id.id) + if current_context: + return current_id, current_context + raise Exception( + "No trace context found. Please use get_ctx inside a traced function." + ) + + +def trace( + *, + _name: typing.Optional[str] = None, + _tags: typing.Optional[typing.Dict[str, str]] = None, +) -> typing.Callable[[T], T]: + def decorator(func: T) -> T: + name = _name or func.__name__ + tags = _tags or {} + + @functools.wraps(func) + async def wrapper_async(*args: typing.Any, **kwargs: typing.Any) -> typing.Any: + async with TraceContext(name, func, args, kwargs) as ctx: + if tags: + update_trace_tags(**tags) + result = await func(*args, **kwargs) + ctx.set_output(result) + return result + + @functools.wraps(func) + def wrapper_sync(*args: typing.Any, **kwargs: typing.Any) -> typing.Any: + with TraceContext(name, func, args, kwargs) as ctx: + if tags: + update_trace_tags(**tags) + result = func(*args, **kwargs) + ctx.set_output(result) + return result + + if asyncio.iscoroutinefunction(func): + return wrapper_async # type: ignore + else: + return wrapper_sync # type: ignore + + return decorator + + +class TagContextManager: + def __init__(self, **tags: typing.Any) -> None: + self.tags: typing.Dict[str, typing.Any] = tags + self.previous_tags: typing.Optional[typing.Dict[str, typing.Any]] = None + + def __enter__(self) -> None: + current_stack = trace_stack.get() + if current_stack: + current_id = 
current_stack[-1] + current_context = context_storage.get().get(current_id.id) + if current_context: + # We don't want to mutate the original tags dict + self.previous_tags = current_context.tags.copy() + current_context.tags = {**self.previous_tags, **self.tags} + else: + # Throw an error + raise Exception( + "No trace context found. Please use set_tags inside a traced function." + ) + + def __exit__( + self, exc_type: typing.Any, exc_val: typing.Any, exc_tb: typing.Any + ) -> None: + if self.previous_tags is not None: + current_stack = trace_stack.get() + if current_stack: + current_id = current_stack[-1] + current_context = context_storage.get().get(current_id.id) + if current_context: + current_context.tags = self.previous_tags + + +def update_trace_tags(**tags: str | None) -> None: + """ + Update the tags for the current trace. + + Args: + **tags: The tags to update. If a tag is None, it will be removed. + + Raises: + Exception: If no trace context is found. + """ + current_stack = trace_stack.get() + if current_stack: + current_id = current_stack[-1] + current_context = context_storage.get().get(current_id.id) + if current_context: + prev_tags = current_context.tags.copy() + for k, v in tags.items(): + if v is None: + prev_tags.pop(k, None) + else: + prev_tags[k] = v + + current_context.tags = prev_tags + else: + # Throw an error + raise Exception( + "No trace context found. Please use set_tags inside a traced function." 
+ ) diff --git a/clients/python/gloo_py/__init__.py b/clients/python/gloo_py/__init__.py new file mode 100644 index 000000000..5631f6381 --- /dev/null +++ b/clients/python/gloo_py/__init__.py @@ -0,0 +1,22 @@ +from gloo_internal.context_manager import CodeVariant, LLMVariant +from gloo_internal.env import ENV +from gloo_internal.llm_clients import register_llm_client, llm_client_factory +from gloo_internal.tracer import trace, update_trace_tags + +# For backwards compatibility +from gloo_internal.llm_client import LLMClient, OpenAILLMClient + + +__version__ = "1.1.23" + +__all__ = [ + "CodeVariant", + "LLMVariant", + "ENV", + "LLMClient", + "OpenAILLMClient", + "register_llm_client", + "llm_client_factory", + "trace", + "update_trace_tags", +] diff --git a/clients/python/gloo_py/py.typed b/clients/python/gloo_py/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/clients/python/gloo_py/stringify/__init__.py b/clients/python/gloo_py/stringify/__init__.py new file mode 100644 index 000000000..987b735b4 --- /dev/null +++ b/clients/python/gloo_py/stringify/__init__.py @@ -0,0 +1,39 @@ +from gloo_stringify import ( + StringifyBase, + StringifyError, + StringifyNone, + StringifyBool, + StringifyInt, + StringifyChar, + StringifyString, + StringifyFloat, + StringifyEnum, + StringifyUnion, + StringifyOptional, + StringifyList, + StringifyClass, + FieldDescription, + EnumFieldDescription, + StringifyRemappedField, + StringifyCtx, +) + +__all__ = [ + "StringifyBase", + "StringifyError", + "StringifyNone", + "StringifyBool", + "StringifyInt", + "StringifyChar", + "StringifyString", + "StringifyFloat", + "StringifyEnum", + "StringifyUnion", + "StringifyOptional", + "StringifyList", + "StringifyClass", + "FieldDescription", + "EnumFieldDescription", + "StringifyRemappedField", + "StringifyCtx", +] diff --git a/clients/python/gloo_py/testing/__init__.py b/clients/python/gloo_py/testing/__init__.py new file mode 100644 index 000000000..ba2bc7084 --- /dev/null +++ 
b/clients/python/gloo_py/testing/__init__.py @@ -0,0 +1,3 @@ +from gloo_testing import gloo_test + +__all__ = ["gloo_test"] diff --git a/clients/python/gloo_stringify/__init__.py b/clients/python/gloo_stringify/__init__.py new file mode 100644 index 000000000..95337b77e --- /dev/null +++ b/clients/python/gloo_stringify/__init__.py @@ -0,0 +1,28 @@ +from .stringify_enum import StringifyEnum, EnumFieldDescription +from .stringify_primitive import StringifyBool, StringifyNone, StringifyInt, StringifyString, StringifyFloat, StringifyChar +from .stringify_optional import StringifyOptional +from .stringify_union import StringifyUnion +from .stringify_list import StringifyList +from .stringify_class import StringifyClass, FieldDescription +from .stringify import StringifyRemappedField, StringifyCtx, StringifyBase +from .errors import StringifyError + +__all__ = [ + "StringifyBase", + "StringifyError", + "StringifyNone", + "StringifyBool", + "StringifyInt", + "StringifyChar", + "StringifyString", + "StringifyFloat", + "StringifyEnum", + "StringifyUnion", + "StringifyOptional", + "StringifyList", + "StringifyClass", + "FieldDescription", + "EnumFieldDescription", + "StringifyRemappedField", + "StringifyCtx", +] \ No newline at end of file diff --git a/clients/python/gloo_stringify/errors.py b/clients/python/gloo_stringify/errors.py new file mode 100644 index 000000000..428cc0383 --- /dev/null +++ b/clients/python/gloo_stringify/errors.py @@ -0,0 +1,6 @@ +class StringifyError(Exception): + """Raised when an error occurs while stringifying an object.""" + + def __init__(self, message: str) -> None: + super().__init__(message) + \ No newline at end of file diff --git a/clients/python/gloo_stringify/py.typed b/clients/python/gloo_stringify/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/clients/python/gloo_stringify/stringify.py b/clients/python/gloo_stringify/stringify.py new file mode 100644 index 000000000..fc413f8f3 --- /dev/null +++ 
b/clients/python/gloo_stringify/stringify.py @@ -0,0 +1,114 @@ +import abc +import json +import typing +from .errors import StringifyError + +T = typing.TypeVar("T") + + +class StringifyBase(abc.ABC, typing.Generic[T]): + @property + def json(self) -> str: + return self._json_str() + + @abc.abstractmethod + def _json_str(self) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def _parse(self, value: typing.Any) -> T: + raise NotImplementedError() + + def parse(self, value: typing.Any) -> T: + try: + return self._parse(value) + except StringifyError as e: + raise e + except ValueError: + raise StringifyError(f"Expected {self.json}, got {value}") + + @abc.abstractmethod + def vars(self) -> typing.Dict[str, str]: + raise NotImplementedError() + + +class StringifyRemappedField: + def __init__( + self, + *, + rename: typing.Optional[str] = None, + describe: typing.Optional[str] = None, + skip: bool = False, + ) -> None: + self.name = rename + self.description = describe + self.skip = skip + + +class StringifyCtx: + _context_stack: typing.List[object] = [] + _instances_stack: typing.List[typing.Any] = [] + + def __enter__(self) -> "StringifyCtx": + self.current_context = object() + StringifyCtx._context_stack.append(self.current_context) + StringifyCtx._instances_stack.append({}) + return self + + def __exit__( + self, exc_type: typing.Any, exc_value: typing.Any, traceback: typing.Any + ) -> None: + StringifyCtx._context_stack.pop() + StringifyCtx._instances_stack.pop() + + @staticmethod + def get_current_context() -> typing.Optional[object]: + return StringifyCtx._context_stack[-1] if StringifyCtx._context_stack else None + + @staticmethod + def set_instance_for_current_context(cls: typing.Any, instance: typing.Any) -> None: + current_context = StringifyCtx.get_current_context() + if current_context: + StringifyCtx._instances_stack[-1][cls] = instance + + @staticmethod + def get_instance_for_current_context( + cls: typing.Any, + ) -> 
typing.Optional[typing.Any]: + current_context = StringifyCtx.get_current_context() + if current_context and cls in StringifyCtx._instances_stack[-1]: + return StringifyCtx._instances_stack[-1][cls] + return None + + +def as_singular(value: typing.Any) -> typing.Any: + try: + if isinstance(value, str): + stripped = value.strip() + if ( + (stripped.startswith("[") and stripped.endswith("]")) + or (stripped.startswith("{") and stripped.endswith("}")) + or (stripped.startswith("(") and stripped.endswith(")")) + ): + parsed = json.loads(stripped) + if isinstance(parsed, (list, tuple)): + if len(parsed) >= 1: + return parsed[0] + if isinstance(parsed, (set, frozenset)): + if len(parsed) >= 1: + return next(iter(parsed)) + if isinstance(parsed, dict): + if len(parsed) >= 1: + return next(iter(parsed.values())) + except json.JSONDecodeError: + pass + if isinstance(value, (list, tuple)): + if len(value) >= 1: + return value[0] + if isinstance(value, (set, frozenset)): + if len(value) >= 1: + return next(iter(value)) + if isinstance(value, dict): + if len(value) >= 1: + return next(iter(value.values())) + return value diff --git a/clients/python/gloo_stringify/stringify_class.py b/clients/python/gloo_stringify/stringify_class.py new file mode 100644 index 000000000..f5b1565c7 --- /dev/null +++ b/clients/python/gloo_stringify/stringify_class.py @@ -0,0 +1,150 @@ +from __future__ import annotations +import typing +from .stringify import StringifyBase, StringifyRemappedField, StringifyCtx +from .errors import StringifyError +from pydantic import BaseModel +import json + +U = typing.TypeVar("U", bound=BaseModel) +T = typing.TypeVar("T") + + +class FieldDescription(typing.Generic[T]): + def __init__( + self, + name: str, + description: None | str, + type_desc: StringifyBase[T], + ) -> None: + self.name = name + self.__description = description + self.__type = type_desc + + @property + def _type(self) -> StringifyBase[T]: + return self.__type + + @property + def description(self) 
-> str: + if self.__description: + return self.__description + return self.__type.json + + def __str__(self) -> str: + return self.name + + @property + def json(self) -> str: + return self.__type.json + + def vars(self) -> typing.Dict[str, str]: + return self.__type.vars() + + +def update_field_description( + field: FieldDescription[T], + *, + update: typing.Optional[StringifyRemappedField] = None, +) -> FieldDescription[T]: + if update is None: + return field + return FieldDescription( + name=update.name or field.name, + description=update.description or field.description, + type_desc=field._type, + ) + + +class StringifyClass(StringifyBase[U]): + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> "StringifyClass[U]": + instance = StringifyCtx.get_instance_for_current_context(cls) + if not instance: + instance = super(StringifyClass, cls).__new__(cls) + StringifyCtx.set_instance_for_current_context(cls, instance) + instance._initialized = False # type: ignore + else: + instance._initialized = True + return instance + + def __init__( + self, + *, + model: typing.Type[U], + values: typing.Dict[str, FieldDescription[typing.Any]], + updates: typing.Dict[str, StringifyRemappedField], + ) -> None: + # If this instance is already initialized, don't execute __init__ again + if getattr(self, "_initialized", False): + return None + + props = { + k: update_field_description(v, update=updates.get(k)) + for k, v in values.items() + } + self.__props = props + self.__reverse_props = {v.name.lower(): k for k, v in props.items()} + self.__model = model + self.__name = model.__name__ + + def __getattribute__(self, item: str) -> typing.Any: + # Attempt to return the attribute using the standard method + try: + return super().__getattribute__(item) + except AttributeError: + # If it fails, use the custom logic in __getattr__ + return self.__getattr__(item) + + def __getattr__(self, item: str) -> FieldDescription[typing.Any]: + # This will only be called if the attribute is 
not found through the standard methods + res = self.__props.get(item) + if res is None: + raise AttributeError(f"Unknown field: {item}") + return res + + def _json_str(self) -> str: + vals = [f'"{v.name}": {v.description}' for v in self.__props.values()] + # join the values with a newline and indent them + joined = ",\n".join(f" {v}" for v in vals) + return "{\n" + joined + "\n}" + + def _parse(self, value: typing.Any) -> U: + if isinstance(value, str): + value = json.loads(value) + if not isinstance(value, dict): + raise StringifyError(f"Expected dict, got {value} ({type(value)})") + + # Force all keys to be strings. + dict_value = {str(k): v for k, v in value.items()} + + # Replace all keys with the renamed keys + props = self.__props + rev_props = self.__reverse_props + dict_value = { + rev_props[k.lower()]: props[rev_props[k.lower()]]._type.parse(v) + for k, v in dict_value.items() + if k.lower() in rev_props + } + try: + return self.__model.model_validate(dict_value) + except Exception as e: + raise StringifyError( + f"Expected {self.__name} as {self.json}, got {value} ({type(value)}): {e}" + ) + + def vars(self) -> typing.Dict[str, str]: + v = { + f"{self.__name}.{k}": { + "name": v.name, + "description": v.description, + "json": v.json, + } + for k, v in self.__props.items() + } + # Flatten the dict + x = {f"{k}.{k2}": v2 for k, v in v.items() for k2, v2 in v.items()} + + for k, v1 in self.__props.items(): + x[f"{self.__name}.{k}"] = v1.name + + x[f"{self.__name}.json"] = self.json + return x diff --git a/clients/python/gloo_stringify/stringify_enum.py b/clients/python/gloo_stringify/stringify_enum.py new file mode 100644 index 000000000..ce3e89313 --- /dev/null +++ b/clients/python/gloo_stringify/stringify_enum.py @@ -0,0 +1,121 @@ +from __future__ import annotations +import typing +from enum import Enum +from .stringify import StringifyBase, StringifyRemappedField, StringifyCtx, as_singular +from .errors import StringifyError + +T = typing.TypeVar("T", 
bound=Enum) + + +class EnumFieldDescription: + def __init__(self, *, name: str, description: None | str, skip: bool) -> None: + self.name = name + self.description = description + self.skip = skip + + def __str__(self) -> str: + return self.name + + def to_enum_str(self) -> str: + if self.description is None: + return self.name + return f"{self.name}: {self.description}" + + +def update_field_description( + field: EnumFieldDescription, + *, + update: typing.Optional[StringifyRemappedField] = None, +) -> EnumFieldDescription: + if update is None: + return field + return EnumFieldDescription( + name=update.name or field.name, + description=update.description or field.description, + skip=update.skip or field.skip, + ) + + +class StringifyEnum(StringifyBase[T]): + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> "StringifyEnum[T]": + instance = StringifyCtx.get_instance_for_current_context(cls) + if not instance: + instance = super(StringifyEnum, cls).__new__(cls) + StringifyCtx.set_instance_for_current_context(cls, instance) + instance._initialized = False # type: ignore + else: + instance._initialized = True + return instance + + def __init__( + self, + *, + values: typing.Dict[T, EnumFieldDescription], + updates: typing.Dict[str, StringifyRemappedField], + ) -> None: + # If this instance is already initialized, don't execute __init__ again + if getattr(self, "_initialized", False): + return None + + self.__name = list(values.keys())[0].__class__.__name__ + props = { + k: update_field_description(v, update=updates.get(k.name)) + for k, v in values.items() + } + self.__props = {k.name: v for k, v in props.items()} + self.__reverse_props = {v.name.lower(): k for k, v in props.items()} + + def __getattribute__(self, item: str) -> typing.Any: + # Attempt to return the attribute using the standard method + try: + return super().__getattribute__(item) + except AttributeError: + # If it fails, use the custom logic in __getattr__ + return self.__getattr__(item) + 
+ def __getattr__(self, item: str) -> EnumFieldDescription: + # This will only be called if the attribute is not found through the standard methods + res = self.__props.get(item) + if res is None: + raise AttributeError(f"Unknown field: {item}") + return res + + @property + def names(self) -> str: + return " | ".join( + [f'"{val.name}"' for val in self.__props.values() if not val.skip] + ) + + @property + def values(self) -> str: + return "\n".join( + [val.to_enum_str() for val in self.__props.values() if not val.skip] + ) + + @property + def description(self) -> str: + return self.names + + def _json_str(self) -> str: + return self.names + + def _parse(self, value: typing.Any) -> T: + value = as_singular(value) + + if not isinstance(value, str): + raise StringifyError(f"Invalid enum: {value}: {type(value)}") + val = self.__reverse_props.get(value.lower()) + if val is None: + raise StringifyError(f"Invalid enum: {value}: {type(value)}") + return val + + def vars(self) -> typing.Dict[str, str]: + v = { + f"{self.__name}.names": self.names, + f"{self.__name}.values": self.values, + } + for k, val in self.__props.items(): + v[f"{self.__name}.{k}"] = val.name + v[f"{self.__name}.{k}.name"] = val.name + v[f"{self.__name}.{k}.desc"] = val.description or "" + return v diff --git a/clients/python/gloo_stringify/stringify_list.py b/clients/python/gloo_stringify/stringify_list.py new file mode 100644 index 000000000..662a804df --- /dev/null +++ b/clients/python/gloo_stringify/stringify_list.py @@ -0,0 +1,37 @@ +import json +import typing +from .stringify import StringifyBase +from .stringify_primitive import StringifyPrimitive +from .stringify_class import StringifyClass +from .errors import StringifyError + +T = typing.TypeVar("T") + + +class StringifyList(StringifyBase[typing.List[T]]): + def __init__(self, args: StringifyBase[T]) -> None: + self.__args = args + + def _json_str(self) -> str: + if isinstance(self.__args, (StringifyPrimitive, StringifyClass)): + return 
f"{self.__args.json}[]" + return f"({self.__args.json})[]" + + def _parse(self, value: typing.Any) -> typing.List[T]: + if isinstance(value, str): + if value.startswith("[") and value.endswith("]"): + value = json.loads(value) + # Make sure we have a list + if not isinstance(value, list): + value = [value] + result: typing.List[T] = [] + for item in value: + try: + parsed = self.__args.parse(item) + result.append(parsed) + except StringifyError: + pass + return result + + def vars(self) -> typing.Dict[str, str]: + return self.__args.vars() diff --git a/clients/python/gloo_stringify/stringify_optional.py b/clients/python/gloo_stringify/stringify_optional.py new file mode 100644 index 000000000..e9886f1b0 --- /dev/null +++ b/clients/python/gloo_stringify/stringify_optional.py @@ -0,0 +1,23 @@ +import typing +from .stringify import StringifyBase + +U = typing.TypeVar("U") + + +class StringifyOptional(StringifyBase[typing.Optional[U]]): + def __init__(self, args: StringifyBase[U]) -> None: + self.__args = args + + def _json_str(self) -> str: + return f"{self.__args.json} | null" + + def _parse(self, value: typing.Any) -> typing.Optional[U]: + if value is None: + return None + if isinstance(value, str): + if value.strip().lower() == "null": + return None + return self.__args.parse(value) + + def vars(self) -> typing.Dict[str, str]: + return self.__args.vars() diff --git a/clients/python/gloo_stringify/stringify_primitive.py b/clients/python/gloo_stringify/stringify_primitive.py new file mode 100644 index 000000000..f6eef5796 --- /dev/null +++ b/clients/python/gloo_stringify/stringify_primitive.py @@ -0,0 +1,174 @@ +import json +import re +import typing +from .stringify import StringifyBase, as_singular +from .errors import StringifyError + +T = typing.TypeVar("T", str, int, float, bool, None) + + +class StringifyPrimitive(StringifyBase[T]): + pass + + +class StringifyString(StringifyPrimitive[str]): + def _json_str(self) -> str: + return "string" + + def _parse(self, 
value: typing.Any) -> str: + value = as_singular(value) + if isinstance(value, str): + stripped = value.strip() + if stripped.startswith('"""') and stripped.endswith('"""'): + return stripped[3:-3] + if stripped.startswith("'''") and stripped.endswith("'''"): + return stripped[3:-3] + if stripped.startswith('"') and stripped.endswith('"'): + return stripped[1:-1] + if stripped.startswith("'") and stripped.endswith("'"): + return stripped[1:-1] + return stripped + return str(value) + + def vars(self) -> typing.Dict[str, str]: + return {} + + +class StringifyChar(StringifyPrimitive[str]): + def _json_str(self) -> str: + return "char" + + def _parse(self, value: typing.Any) -> str: + value = as_singular(value) + + if isinstance(value, str): + stripped = value.strip() + if stripped.startswith('"""') and stripped.endswith('"""'): + stripped = stripped[3:-3] + elif stripped.startswith("'''") and stripped.endswith("'''"): + stripped = stripped[3:-3] + elif stripped.startswith('"') and stripped.endswith('"'): + stripped = stripped[1:-1] + elif stripped.startswith("'") and stripped.endswith("'"): + stripped = stripped[1:-1] + elif len(stripped) == 1: + stripped = stripped + + cleaned = stripped.strip() + # Log warning if string is longer than 1 char + if len(cleaned) == 0: + raise StringifyError(f"Expected char, got {stripped}") + return cleaned[0] + val = str(value) + if len(val) == 0: + raise StringifyError(f"Expected char, got {value}") + return val[0] + + def vars(self) -> typing.Dict[str, str]: + return {} + + +class StringifyFloat(StringifyPrimitive[float]): + def _json_str(self) -> str: + return "float" + + def _parse(self, value: typing.Any) -> float: + value = as_singular(value) + if isinstance(value, str): + cleaned = value.strip().lower() + # Validate string only has digits and or a single decimal point. + # A starting negative sign is allowed, and starting digit is not required. + # Commas are allowed, but only between digits before the decimal point. 
+ if re.match(r"^-?(\d+,?)*\.?\d+$", cleaned): + # Remove commas + cleaned = cleaned.replace(",", "") + return float(cleaned) + else: + try: + return float(json.loads(value.lower())) + except TypeError: + raise StringifyError(f"Expected float, got string: {value}") + except json.JSONDecodeError: + raise StringifyError(f"Expected float, got string: {value}") + try: + return float(value) + except TypeError: + raise StringifyError(f"Expected float, got {value}") + except ValueError: + raise StringifyError(f"Expected float, got {value}") + + def vars(self) -> typing.Dict[str, str]: + return {} + + +class StringifyInt(StringifyPrimitive[int]): + def _json_str(self) -> str: + return "int" + + def _parse(self, value: typing.Any) -> int: + value = as_singular(value) + if isinstance(value, str): + cleaned = value.strip().lower() + # Validate string only has digits. + # A starting negative sign is allowed, and starting digit is not required. + # Commas are allowed, but only between digits. + if re.match(r"^-?(\d+,?)*\d+$", cleaned): + # Remove commas + cleaned = cleaned.replace(",", "") + return int(cleaned) + else: + try: + return int(json.loads(cleaned)) + except json.JSONDecodeError: + raise StringifyError(f"Expected int, got string: {value}") + except TypeError: + raise StringifyError(f"Expected int, got string: {value}") + try: + return int(value) + except TypeError: + raise StringifyError(f"Expected int, got {value}") + except ValueError: + raise StringifyError(f"Expected int, got {value}") + + def vars(self) -> typing.Dict[str, str]: + return {} + + +class StringifyBool(StringifyPrimitive[bool]): + def _json_str(self) -> str: + return "bool" + + def _parse(self, value: typing.Any) -> bool: + value = as_singular(value) + print(value, type(value)) + if isinstance(value, str): + cleaned = value.strip().lower() + if cleaned == "true": + return True + elif cleaned == "false": + return False + else: + try: + return bool(as_singular(cleaned)) + except json.JSONDecodeError: + 
raise StringifyError(f"Expected bool, got string: {value}") + except TypeError: + raise StringifyError(f"Expected bool, got string: {value}") + try: + return bool(value) + except ValueError: + raise StringifyError(f"Expected bool, got {value}") + + def vars(self) -> typing.Dict[str, str]: + return {} + + +class StringifyNone(StringifyPrimitive[None]): + def _json_str(self) -> str: + return "null" + + def _parse(self, value: typing.Any) -> None: + return None + + def vars(self) -> typing.Dict[str, str]: + return {} diff --git a/clients/python/gloo_stringify/stringify_union.py b/clients/python/gloo_stringify/stringify_union.py new file mode 100644 index 000000000..a92840895 --- /dev/null +++ b/clients/python/gloo_stringify/stringify_union.py @@ -0,0 +1,27 @@ +import typing +from .stringify import StringifyBase +from .errors import StringifyError + +T = typing.TypeVar("T") + + +class StringifyUnion(StringifyBase[T]): + def __init__(self, *args: StringifyBase[typing.Any]) -> None: + self.__args = args + + def _json_str(self) -> str: + return " | ".join(map(lambda x: x.json, self.__args)) + + def _parse(self, value: typing.Any) -> T: + for arg in self.__args: + try: + return typing.cast(T, arg.parse(value)) + except StringifyError: + pass + raise StringifyError(f"Could not parse {value} as {self.json}") + + def vars(self) -> typing.Dict[str, str]: + v = {} + for arg in self.__args: + v.update(arg.vars()) + return v diff --git a/clients/python/gloo_stringify/tests/__init__.py b/clients/python/gloo_stringify/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/clients/python/gloo_stringify/tests/test_stringify.py b/clients/python/gloo_stringify/tests/test_stringify.py new file mode 100644 index 000000000..939689ba3 --- /dev/null +++ b/clients/python/gloo_stringify/tests/test_stringify.py @@ -0,0 +1,237 @@ +import json +import pytest +from pydantic import BaseModel +import typing +from typing import Any + +from ..stringify_class import 
FieldDescription +from .. import ( + StringifyFloat, + StringifyCtx, + StringifyClass, + StringifyInt, + StringifyString, + StringifyList, + StringifyOptional, + StringifyBool, + StringifyChar, +) + + +class TestPrimitive: + @pytest.mark.parametrize( + "actual,expected", + [ + (1, "1"), + (1.0, "1.0"), + (True, "True"), + (False, "False"), + (None, "None"), + ("hello", "hello"), + ("'hello'", "hello"), + ('"hello"', "hello"), + ("'''hello'''", "hello"), + ('"""hello"""', "hello"), + (" hello", "hello"), + # Array to string + (["hello"], "hello"), + (["hello", "world"], "hello"), + ('["hello"]', "hello"), + ('["hello", "world"]', "hello"), + # Tuple to string + (("hello",), "hello"), + (("hello", "world"), "hello"), + # Tuples as strings aren't supported + ('("hello")', '("hello")'), + ('("hello", "world")', '("hello", "world")'), + # Object to string + ({"hello": "world"}, "world"), + ('{"hello": "world"}', "world"), + ('{"hello": "world", "hello2": "world2"}', "world"), + ({"hello": "world", "hello2": "world2"}, "world"), + ], + ) + def test_string(self, actual: Any, expected: str) -> None: + with StringifyCtx(): + x = StringifyString().parse(actual) + assert isinstance(x, str) + if '"' in expected: + alt_expected = expected.replace('"', "'") + elif "'" in expected: + alt_expected = expected.replace("'", '"') + else: + alt_expected = None + if alt_expected: + assert x == expected or x == alt_expected + else: + assert x == expected + + @pytest.mark.parametrize( + "actual,expected", + [ + (1, "1"), + (1.0, "1"), + (True, "T"), + (False, "F"), + (None, "N"), + ("hello", "h"), + ("'hello'", "h"), + ('"hello"', "h"), + ("'''hello'''", "h"), + ('"""hello"""', "h"), + (" hello", "h"), + # Array to string + (["hello"], "h"), + (["hello", "world"], "h"), + ('["hello"]', "h"), + ('["hello", "world"]', "h"), + # Tuple to string + (("hello",), "h"), + (("hello", "world"), "h"), + # Tuples as strings aren't supported + ('("hello")', "("), + ('("hello", "world")', "("), + # 
Object to string + ({"hello": "world"}, "w"), + ('{"hello": "world"}', "w"), + ('{"hello": "world", "hello2": "1234"}', "w"), + ({"hello": "world", "hello2": "1234"}, "w"), + ], + ) + def test_char(self, actual: Any, expected: str) -> None: + with StringifyCtx(): + x = StringifyChar().parse(actual) + assert isinstance(x, str) + if '"' in expected: + alt_expected = expected.replace('"', "'") + elif "'" in expected: + alt_expected = expected.replace("'", '"') + else: + alt_expected = None + if alt_expected: + assert x == expected or x == alt_expected + else: + assert x == expected + + @pytest.mark.parametrize( + "actual,expected", + [ + (1, 1), + (1.1, 1), + (1.6, 1), + (True, 1), + (False, 0), + ([1, 2, 3], 1), + ], + ) + @pytest.mark.parametrize("l", [None, json.dumps, str]) + def test_int(self, l: Any, actual: Any, expected: int) -> None: + if l: + actual = l(actual) + with StringifyCtx(): + x = StringifyInt().parse(actual) + assert isinstance(x, int) + assert x == expected + + @pytest.mark.parametrize( + "actual,expected", + [ + (1, 1), + (1.1, 1.1), + (1.6, 1.6), + (True, 1), + (False, 0), + ([1.1, 2.11, 3], 1.1), + ], + ) + @pytest.mark.parametrize("l", [None, json.dumps, str]) + def test_float(self, l: Any, actual: Any, expected: int) -> None: + if l: + actual = l(actual) + with StringifyCtx(): + x = StringifyFloat().parse(actual) + assert isinstance(x, float) + assert x == expected + + @pytest.mark.parametrize( + "actual,expected", + [ + (1, True), + (1.1, True), + (True, True), + (False, False), + ([1.1, 2.11, 3], True), + ([], False), + ([1], True), + ([1, 2], True), + ([0], False), + ([True], True), + ([False], False), + ("true", True), + ("false", False), + ], + ids=[ + "int", + "float", + "True", + "False", + "list", + "empty list", + "list with 1 element", + "list with 2 elements", + "list with 0", + "list with True", + "list with False", + "string true", + "string false", + ], + ) + @pytest.mark.parametrize("l", [None, json.dumps, str]) + def 
test_bool(self, l: Any, actual: Any, expected: int) -> None: + if l: + actual = l(actual) + with StringifyCtx(): + x = StringifyBool().parse(actual) + assert isinstance(x, bool) + assert x == expected + + +class TestClass: + def test_class_a(self) -> None: + class ModelA(BaseModel): + a: int + b: str + c: typing.List[int] + d: typing.Optional[str] + + with StringifyCtx(): + clx = StringifyClass( + model=ModelA, + values={ + "a": FieldDescription( + name="a", description=None, type_desc=StringifyInt() + ), + "b": FieldDescription( + name="b", description=None, type_desc=StringifyString() + ), + "c": FieldDescription( + name="c", + description=None, + type_desc=StringifyList(StringifyInt()), + ), + "d": FieldDescription( + name="d", + description=None, + type_desc=StringifyOptional(StringifyString()), + ), + }, + updates={}, + ) + + x = clx.parse("""{"a": 1, "b": '2', "c": [3, 4], "d": '5'}""") + assert x.a == 1 + assert x.b == "2" + assert x.c == [3, 4] + assert x.d == "5" + + del x diff --git a/clients/python/gloo_testing/__init__.py b/clients/python/gloo_testing/__init__.py new file mode 100644 index 000000000..bb15d42e5 --- /dev/null +++ b/clients/python/gloo_testing/__init__.py @@ -0,0 +1,3 @@ +from .pytest_gloo import gloo_test + +__all__ = ["gloo_test"] diff --git a/clients/python/gloo_testing/py.typed b/clients/python/gloo_testing/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/clients/python/gloo_testing/pytest_gloo.py b/clients/python/gloo_testing/pytest_gloo.py new file mode 100644 index 000000000..e64ef1ab5 --- /dev/null +++ b/clients/python/gloo_testing/pytest_gloo.py @@ -0,0 +1,269 @@ +import asyncio +import typing +import pytest +from gloo_internal.api import API +from gloo_internal.tracer import trace +from gloo_internal.api_types import ( + TestCaseStatus, +) +from gloo_internal.env import ENV +from gloo_internal.logging import logger +import os +import re + +from gloo_internal import api_types + +gloo_test = 
pytest.mark.gloo_test + + +class GlooTestCaseBase(typing.TypedDict): + name: str + + +T = typing.TypeVar("T", bound=GlooTestCaseBase) + + +def pytest_configure(config: pytest.Config) -> None: + logger.debug("Registering pytest_gloo plugin.") + config.addinivalue_line( + "markers", "gloo_test: mark test as a gloo test to be run in gloo services" + ) + config.pluginmanager.register(GlooPytestPlugin(), "pytest_gloo") + + +class TestCaseMetadata: + node_id: str + dataset_name: str + test_name: str + case_name: str + + def __init__(self, item: pytest.Item) -> None: + self.node_id = item.nodeid + self.dataset_name = item.parent.name if item.parent else "Ungrouped" + + test_name = "test" + case_name = item.name + + # TODO: Do this better. + # test_name = item.name + # try: + # match = re.search(r"\[(.*?)\]", test_name) + # if match: + # case_name = match.group(1) + # test_name = re.sub(r"\[.*?\]", "", test_name) + # else: + # case_name = "__default__" + # except AttributeError: + # case_name = "__error__" + + self.test_name = test_name + self.case_name = case_name + + def __str__(self) -> str: + return f"{self.dataset_name}/{self.test_name}/{self.case_name}" + + @property + def tags(self) -> typing.Dict[str, str]: + return { + "dataset_name": self.dataset_name, + "test_name": self.test_name, + "case_name": self.case_name, + } + + +def sanitize(input_str: str) -> str: + return re.sub(r"[^\w-]", "_", input_str) + + +# See https://docs.pytest.org/en/7.1.x/_modules/_pytest/hookspec.html#pytest_runtestloop +class GlooPytestPlugin: + def __init__(self) -> None: + self.__gloo_tests: typing.Dict[str, TestCaseMetadata] = {} + self.__completed_tests: typing.Set[str] = set() + + # On register, we want to set the STAGE env variable + # to "test" so that the tracer knows to send the logs + def pytest_sessionstart(self, session: pytest.Session) -> None: + os.environ["GLOO_STAGE"] = "test" + + @pytest.hookimpl(tryfirst=True) + def pytest_collection_finish(self, session: pytest.Session) 
-> None: + """Called after collection has been performed and modified. + + :param pytest.Session session: The pytest session object. + """ + + # Check if any of the tests are marked as gloo tests + # If not, we can skip the rest of the setup + + for item in session.items: + if any(map(lambda mark: mark.name == "gloo_test", item.iter_markers())): + self.__gloo_tests[item.nodeid] = TestCaseMetadata(item) + # logger.info( + # f"Found gloo test: {item.nodeid}: {self.__gloo_tests[item.nodeid]}" + # ) + + async def maybe_start_logging(self, session: pytest.Session) -> None: + logger.debug( + f"Starting logging: Num Tests: {len(self.__gloo_tests)}, {len(session.items)}" + ) + if len(self.__gloo_tests) == 0: + logger.debug("No Gloo tests detected") + return + + logger.debug("Creating test cases") + + dataset_cases: typing.Dict[str, typing.Dict[str, typing.List[str]]] = {} + for item in self.__gloo_tests.values(): + # Add case_name to the corresponding dataset + if item.dataset_name not in dataset_cases: + dataset_cases[item.dataset_name] = {} + if item.test_name not in dataset_cases[item.dataset_name]: + dataset_cases[item.dataset_name][item.test_name] = [] + dataset_cases[item.dataset_name][item.test_name].append(item.case_name) + + # Validate that no duplicate test cases are being created + for dataset_name, test_cases in dataset_cases.items(): + for test_name, case_names in test_cases.items(): + if len(set(case_names)) != len(case_names): + duplicate_cases = [ + case_name + for case_name in case_names + if case_names.count(case_name) > 1 + ] + raise Exception( + f"Duplicate test cases found in dataset {dataset_name} test {test_name}: {duplicate_cases}" + ) + + await API.test.create_session() + + await asyncio.gather( + *list( + API.test.create_cases( + payload=api_types.CreateTestCase( + test_dataset_name=dataset_name, + test_name=test_name, + test_case_args=[{"name": c} for c in case_names], + ) + ) + for dataset_name, test_cases in dataset_cases.items() + for 
test_name, case_names in test_cases.items() + ) + ) + + @pytest.hookimpl(tryfirst=True) + def pytest_runtestloop( + self, session: pytest.Session + ) -> typing.Optional[typing.Any]: + if ( + session.testsfailed + and not session.config.option.continue_on_collection_errors + ): + raise session.Interrupted( + "%d errors during collection" % session.testsfailed + ) + + if session.config.option.collectonly: + return True + + asyncio.run(self.maybe_start_logging(session)) + return None + + @pytest.hookimpl(tryfirst=True) + def pytest_runtest_logstart( + self, nodeid: str, location: typing.Tuple[str, typing.Optional[int], str] + ) -> None: + """Called at the start of running the runtest protocol for a single item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param str nodeid: Full node ID of the item. + :param location: A tuple of ``(filename, lineno, testname)``. + """ + if nodeid in self.__gloo_tests: + item = self.__gloo_tests[nodeid] + # Log the start of the test + API.test.update_case_sync( + payload=api_types.UpdateTestCase( + test_dataset_name=item.dataset_name, + test_case_definition_name=item.test_name, + test_case_arg_name=item.case_name, + status=TestCaseStatus.RUNNING, + error_data=None, + ) + ) + + # wrapper ensures we can yield to other hooks + # this one just sets the context but doesnt actually run + # the test. It lets the "default" hook run the test. + @pytest.hookimpl(tryfirst=True) + def pytest_runtest_call(self, item: pytest.Item) -> None: + if item.nodeid not in self.__gloo_tests: + return + + # Before running the test, make this a traced function. 
+ meta = self.__gloo_tests[item.nodeid] + tags = dict( + test_case_arg_name=meta.case_name, + test_case_name=meta.test_name, + test_cycle_id=ENV.GLOO_PROCESS_ID, + test_dataset_name=meta.dataset_name, + ) + item.obj = trace(_tags=tags)(item.obj) # type: ignore + + @pytest.hookimpl(tryfirst=True) + def pytest_runtest_makereport( + self, item: pytest.Item, call: pytest.CallInfo[typing.Any] + ) -> None: + if item.nodeid not in self.__gloo_tests: + return + + if call.when == "call": + status = ( + TestCaseStatus.PASSED if call.excinfo is None else TestCaseStatus.FAILED + ) + + meta = self.__gloo_tests[item.nodeid] + API.test.update_case_sync( + payload=api_types.UpdateTestCase( + test_dataset_name=meta.dataset_name, + test_case_definition_name=meta.test_name, + test_case_arg_name=meta.case_name, + status=status, + error_data={"error": str(call.excinfo.value)} + if call.excinfo + else None, + ) + ) + self.__completed_tests.add(item.nodeid) + + @pytest.hookimpl(tryfirst=True) + def pytest_sessionfinish( + self, + session: pytest.Session, + exitstatus: typing.Union[int, pytest.ExitCode], + ) -> None: + if session.config.option.collectonly: + return + + if ( + session.testsfailed + and not session.config.option.continue_on_collection_errors + ): + return + + try: + for nodeid, meta in self.__gloo_tests.items(): + if nodeid not in self.__completed_tests: + API.test.update_case_sync( + payload=api_types.UpdateTestCase( + test_dataset_name=meta.dataset_name, + test_case_definition_name=meta.test_name, + test_case_arg_name=meta.case_name, + status=TestCaseStatus.CANCELLED, + error_data=None, + ) + ) + except Exception as e: + # If we don't catch this the user is not able to see any other underlying test errors. 
+ logger.error(f"Failed to update test case status: {e}") diff --git a/clients/python/mypy.ini b/clients/python/mypy.ini new file mode 100644 index 000000000..682d25f1a --- /dev/null +++ b/clients/python/mypy.ini @@ -0,0 +1,8 @@ +[mypy] +strict = True + +[mypy-coloredlogs.*] +ignore_missing_imports = True + +[mypy-openai.*] +ignore_untyped_calls = True diff --git a/clients/python/py.typed b/clients/python/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/clients/python/pyproject.toml b/clients/python/pyproject.toml new file mode 100644 index 000000000..76ccf9836 --- /dev/null +++ b/clients/python/pyproject.toml @@ -0,0 +1,52 @@ +[build-system] +requires = [ "poetry-core",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "gloo-lib" +version = "1.1.23" +description = "" +authors = [ "Gloo ",] +[[tool.poetry.packages]] +include = "gloo_py" +from = "." + +[[tool.poetry.packages]] +include = "gloo_stringify" +from = "." + +[[tool.poetry.packages]] +include = "gloo_internal" +from = "." + +[[tool.poetry.packages]] +include = "gloo_testing" +from = "." + +[tool.mypy] +strict = true + +[tool.poetry.dependencies] +python = "^3.8" +pydantic = "^2.3.0" +aiohttp = "^3.8.5" +coloredlogs = "^15.0.1" +pytest = "^7.4.2" +pytest-asyncio = "^0.21.1" +openai = ">=0.27.9" +types-requests = "^2.31.0.2" +anthropic = "^0.3.11" +python-dotenv = "^1.0.0" + +[tool.mypy."coloredlogs.*"] +ignore_missing_imports = true + +[tool.poetry.plugins.pytest11] +pytest-gloo = "gloo_testing.pytest_gloo" + +[tool.poetry.group.dev.dependencies] +mypy = "^1.5.1" +types-decorator = "^5.1.8.4" +types-mock = "^5.1.0.2" +mock = "^5.1.0" +ruff = "^0.0.291" diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 000000000..745062ae7 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,34 @@ +# Mintlify Starter Kit + +Click on `Use this template` to copy the Mintlify starter kit. 
The starter kit contains examples including + +- Guide pages +- Navigation +- Customizations +- API Reference pages +- Use of popular components + +### 👩‍💻 Development + +Install the [Mintlify CLI](https://www.npmjs.com/package/mintlify) to preview the documentation changes locally. To install, use the following command + +``` +npm i -g mintlify +``` + +Run the following command at the root of your documentation (where mint.json is) + +``` +mintlify dev +``` + +### 😎 Publishing Changes + +Changes will be deployed to production automatically after pushing to the default branch. + +You can also preview changes using PRs, which generates a preview link of the docs. + +#### Troubleshooting + +- Mintlify dev isn't running - Run `mintlify install` it'll re-install dependencies. +- Page loads as a 404 - Make sure you are running in a folder with `mint.json` diff --git a/docs/_snippets/snippet-example.mdx b/docs/_snippets/snippet-example.mdx new file mode 100644 index 000000000..089334c54 --- /dev/null +++ b/docs/_snippets/snippet-example.mdx @@ -0,0 +1,3 @@ +## My Snippet + +This is an example of a reusable snippet diff --git a/docs/api-reference/authentication.mdx b/docs/api-reference/authentication.mdx new file mode 100644 index 000000000..4578f69bb --- /dev/null +++ b/docs/api-reference/authentication.mdx @@ -0,0 +1,22 @@ +--- +title: "Authentication" +description: "Example overview page before API endpoints" +--- + +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas et eros iaculis tortor dapibus cursus. Curabitur quis sapien nec tortor dictum gravida. + +```bash +'Authorization': 'Token ' +``` + +## API Tokens + +Nullam convallis mauris at nunc consectetur, ac imperdiet leo rutrum. Maecenas cursus purus a pellentesque blandit. Pellentesque vitae lacinia libero, non mollis metus. + +Nam id ullamcorper urna, at rutrum enim. [Maecenas vulputate](/introduction) vehicula libero, vitae sodales augue pretium nec. 
Quisque a magna tempor, semper risus vel, fermentum nunc. Pellentesque fermentum interdum ex, eu convallis massa blandit sed. Aliquam bibendum ipsum vel laoreet auctor. + +### Permissions + +Etiam lobortis ut odio ut fermentum. Nunc odio velit, sollicitudin at consectetur id, tristique eget turpis. Aliquam at risus vitae dolor sodales venenatis. In hac habitasse platea dictumst. + +Aenean consequat diam eget mollis fermentum. [Quisque eu malesuada](/introduction) felis, non dignissim libero. diff --git a/docs/api-reference/endpoint/create.mdx b/docs/api-reference/endpoint/create.mdx new file mode 100644 index 000000000..7e7f86b9c --- /dev/null +++ b/docs/api-reference/endpoint/create.mdx @@ -0,0 +1,84 @@ +--- +title: "Create User" +api: "POST https://api.mintlify.com/api/user" +description: "This endpoint creates a new user" +--- + +### Body + + + This is the current user group token you have for the user group that you want + to rotate. + + +### Response + + + Indicates whether the call was successful. 1 if successful, 0 if not. + + + + +The contents of the user group + + + + + This is the internal ID for this user group. You don't need to record this + information, since you will not need to use it. + + + + This is the user group token (userGroupToken or USER_GROUP_TOKEN) that will be + used to identify which user group is viewing the dashboard. You should save + this on your end to use when rendering an embedded dashboard. + + + + This is the name of the user group provided in the request body. + + + + This is the user_group_id provided in the request body. + + + + This is the environment tag of the user group. Possible values are 'Customer' + and 'Testing'. User group id's must be unique to each environment, so you can + not create multiple user groups with with same id. 
If you have a production + customer and a test user group with the same id, you will be required to label + one as 'Customer' and another as 'Testing' + + + + + + + + +```bash Example Request +curl --location --request POST 'https://api.mintlify.com/api/user' \ +--header 'Content-Type: application/json' \ +--header 'Authorization: Token ' \ +--data-raw '{ + "current_token": "" +}' +``` + + + + + +```json Response +{ + "success": 1, + "user_group": { + "team_id": 3, + "token": "", + "name": "Example 1", + "provided_id": "example_1" + } +} +``` + + diff --git a/docs/api-reference/endpoint/delete.mdx b/docs/api-reference/endpoint/delete.mdx new file mode 100644 index 000000000..cb0eb8bbe --- /dev/null +++ b/docs/api-reference/endpoint/delete.mdx @@ -0,0 +1,47 @@ +--- +title: "Delete User" +api: "DELETE https://api.mintlify.com/api/user" +description: "This endpoint deletes an existing user." +--- + +### Body + + + The data source ID provided in the data tab may be used to identify the data + source for the user group + + + + This is the current user group token you have for the user group you want to + delete + + +### Response + + + Indicates whether the call was successful. 1 if successful, 0 if not. + + + + +```bash Example Request +curl --location --request DELETE 'https://api.mintlify.com/api/user' \ +--header 'Content-Type: application/json' \ +--header 'Authorization: Token ' \ +--data-raw '{ + "user_group_id": "example_1" + "current_token": "abcdef" +}' +``` + + + + + +```json Response +{ + "success": 1 +} +``` + + diff --git a/docs/api-reference/endpoint/get.mdx b/docs/api-reference/endpoint/get.mdx new file mode 100644 index 000000000..ce95f65bb --- /dev/null +++ b/docs/api-reference/endpoint/get.mdx @@ -0,0 +1,101 @@ +--- +title: "Get User" +api: "GET https://api.mintlify.com/api/user" +description: "This endpoint gets or creates a new user." +--- + +### Body + + + This is the name of the user group. 
+ + + + This is the ID you use to identify this user group in your database. + + + + This is a JSON mapping of schema id to either the data source that this user group should be + associated with or id of the datasource you provided when creating it. + + + + This is a JSON object for properties assigned to this user group. These will be accessible through + variables in the dashboards and SQL editor + + +### Response + + + Indicates whether the call was successful. 1 if successful, 0 if not. + + + + Indicates whether a new user group was created. + + + + +The contents of the user group + + + + + This is the internal ID for this user group. You don't need to record this information, since + you will not need to use it. + + + + This is the user group token (userGroupToken or USER_GROUP_TOKEN) that will be used to identify + which user group is viewing the dashboard. You should save this on your end to use when rendering + an embedded dashboard. + + + + This is the name of the user group provided in the request body. + + + + This is the user_group_id provided in the request body. 
+ + + + This is the properties object if it was provided in the request body + + + + + + + + +```bash Example Request +curl --location --request GET 'https://api.mintlify.com/api/user' \ +--header 'Content-Type: application/json' \ +--header 'Authorization: Token ' \ +--data-raw '{ + "user_group_id": "example_1", + "name": "Example 1", + "mapping": {"40": "213", "134": "386"}, + "properties": {"filterValue": "value"} +}' +``` + + + + + +```json Response +{ + "success": 1, + "new_user_group": true, + "user_group": { + "team_id": 3, + "token": "", + "name": "Example 1", + "provided_id": "example_1" + } +} +``` + + diff --git a/docs/api-reference/endpoint/update.mdx b/docs/api-reference/endpoint/update.mdx new file mode 100644 index 000000000..430498769 --- /dev/null +++ b/docs/api-reference/endpoint/update.mdx @@ -0,0 +1,101 @@ +--- +title: "Update User" +api: "PUT https://api.mintlify.com/api/user" +description: "This endpoint updates an existing user." +--- + +### Body + + + This is the name of the user group. + + + + This is the ID you use to identify this user group in your database. + + + + This is a JSON mapping of schema id to either the data source that this user + group should be associated with or id of the datasource you provided when + creating it. + + + + This is a JSON object for properties assigned to this user group. These will + be accessible through variables in the dashboards and SQL editor + + +### Response + + + Indicates whether the call was successful. 1 if successful, 0 if not. + + + + +The contents of the user group + + + + + Indicates whether a new user group was created. + + + + This is the user group token (userGroupToken or USER_GROUP_TOKEN) that will be + used to identify which user group is viewing the dashboard. You should save + this on your end to use when rendering an embedded dashboard. + + + + This is the name of the user group provided in the request body. + + + + This is the user_group_id provided in the request body. 
+ + + + This is the properties object if it was provided in the request body + + + + This is the environment tag of the user group. Possible values are 'Customer' + and 'Testing' + + + + + + + + +```bash Example Request +curl --location --request PUT 'https://api.mintlify.com/api/user' \ +--header 'Content-Type: application/json' \ +--header 'Authorization: Token ' \ +--data-raw '{ + "user_group_id": "example_1", + "name": "Example 1", + "mapping": {"40": "213", "134": "386"}, + "properties": {"filterValue": "value"} +}' +``` + + + + + +```json Response +{ + "success": 1, + "user_group": { + "team_id": 113, + "token": "", + "name": "ok", + "provided_id": "6" + } +} +``` + + diff --git a/docs/essentials/code.mdx b/docs/essentials/code.mdx new file mode 100644 index 000000000..d2a462a7a --- /dev/null +++ b/docs/essentials/code.mdx @@ -0,0 +1,37 @@ +--- +title: 'Code Blocks' +description: 'Display inline code and code blocks' +icon: 'code' +--- + +## Basic + +### Inline Code + +To denote a `word` or `phrase` as code, enclose it in backticks (`). + +``` +To denote a `word` or `phrase` as code, enclose it in backticks (`). +``` + +### Code Block + +Use [fenced code blocks](https://www.markdownguide.org/extended-syntax/#fenced-code-blocks) by enclosing code in three backticks and follow the leading ticks with the programming language of your snippet to get syntax highlighting. Optionally, you can also write the name of your code after the programming language. 
+ +```java HelloWorld.java +class HelloWorld { + public static void main(String[] args) { + System.out.println("Hello, World!"); + } +} +``` + +````md +```java HelloWorld.java +class HelloWorld { + public static void main(String[] args) { + System.out.println("Hello, World!"); + } +} +``` +```` diff --git a/docs/essentials/images.mdx b/docs/essentials/images.mdx new file mode 100644 index 000000000..4c1517777 --- /dev/null +++ b/docs/essentials/images.mdx @@ -0,0 +1,59 @@ +--- +title: 'Images and Embeds' +description: 'Add image, video, and other HTML elements' +icon: 'image' +--- + + + +## Image + +### Using Markdown + +The [markdown syntax](https://www.markdownguide.org/basic-syntax/#images) lets you add images using the following code + +```md +![title](/path/image.jpg) +``` + +Note that the image file size must be less than 5MB. Otherwise, we recommend hosting on a service like [Cloudinary](https://cloudinary.com/) or [S3](https://aws.amazon.com/s3/). You can then use that URL and embed. + +### Using Embeds + +To get more customizability with images, you can also use [embeds](/writing-content/embed) to add images + +```html + +``` + +## Embeds and HTML elements + + + +
+ + + +Mintlify supports [HTML tags in Markdown](https://www.markdownguide.org/basic-syntax/#html). This is helpful if you prefer HTML tags to Markdown syntax, and lets you create documentation with infinite flexibility. + + + +### iFrames + +Loads another HTML page within the document. Most commonly used for embedding videos. + +```html + +``` diff --git a/docs/essentials/markdown.mdx b/docs/essentials/markdown.mdx new file mode 100644 index 000000000..c8ad9c1f3 --- /dev/null +++ b/docs/essentials/markdown.mdx @@ -0,0 +1,88 @@ +--- +title: 'Markdown Syntax' +description: 'Text, title, and styling in standard markdown' +icon: 'text-size' +--- + +## Titles + +Best used for section headers. + +```md +## Titles +``` + +### Subtitles + +Best use to subsection headers. + +```md +### Subtitles +``` + + + +Each **title** and **subtitle** creates an anchor and also shows up on the table of contents on the right. + + + +## Text Formatting + +We support most markdown formatting. Simply add `**`, `_`, or `~` around text to format it. + +| Style | How to write it | Result | +| ------------- | ----------------- | --------------- | +| Bold | `**bold**` | **bold** | +| Italic | `_italic_` | _italic_ | +| Strikethrough | `~strikethrough~` | ~strikethrough~ | + +You can combine these. For example, write `**_bold and italic_**` to get **_bold and italic_** text. + +You need to use HTML to write superscript and subscript text. That is, add `` or `` around your text. + +| Text Size | How to write it | Result | +| ----------- | ------------------------ | ---------------------- | +| Superscript | `superscript` | superscript | +| Subscript | `subscript` | subscript | + +## Linking to Pages + +You can add a link by wrapping text in `[]()`. You would write `[link to google](https://google.com)` to [link to google](https://google.com). + +Links to pages in your docs need to be root-relative. Basically, you should include the entire folder path. 
For example, `[link to text](/writing-content/text)` links to the page "Text" in our components section. + +Relative links like `[link to text](../text)` will open slower because we cannot optimize them as easily. + +## Blockquotes + +### Singleline + +To create a blockquote, add a `>` in front of a paragraph. + +> Dorothy followed her through many of the beautiful rooms in her castle. + +```md +> Dorothy followed her through many of the beautiful rooms in her castle. +``` + +### Multiline + +> Dorothy followed her through many of the beautiful rooms in her castle. +> +> The Witch bade her clean the pots and kettles and sweep the floor and keep the fire fed with wood. + +```md +> Dorothy followed her through many of the beautiful rooms in her castle. +> +> The Witch bade her clean the pots and kettles and sweep the floor and keep the fire fed with wood. +``` + +### LaTeX + +Mintlify supports [LaTeX](https://www.latex-project.org) through the Latex component. + +8 x (vk x H1 - H2) = (0,1) + +```md +8 x (vk x H1 - H2) = (0,1) +``` diff --git a/docs/essentials/navigation.mdx b/docs/essentials/navigation.mdx new file mode 100644 index 000000000..ca44bb645 --- /dev/null +++ b/docs/essentials/navigation.mdx @@ -0,0 +1,66 @@ +--- +title: 'Navigation' +description: 'The navigation field in mint.json defines the pages that go in the navigation menu' +icon: 'map' +--- + +The navigation menu is the list of links on every website. + +You will likely update `mint.json` every time you add a new page. Pages do not show up automatically. + +## Navigation syntax + +Our navigation syntax is recursive which means you can make nested navigation groups. You don't need to include `.mdx` in page names. 
+ + + +```json Regular Navigation +"navigation": [ + { + "group": "Getting Started", + "pages": ["quickstart"] + } +] +``` + +```json Nested Navigation +"navigation": [ + { + "group": "Getting Started", + "pages": [ + "quickstart", + { + "group": "Nested Reference Pages", + "pages": ["nested-reference-page"] + } + ] + } +] +``` + + + +## Folders + +Simply put your MDX files in folders and update the paths in `mint.json`. + +For example, to have a page at `https://yoursite.com/your-folder/your-page` you would make a folder called `your-folder` containing an MDX file called `your-page.mdx`. + + + +You cannot use `api` for the name of a folder unless you nest it inside another folder. Mintlify uses Next.js which reserves the top-level `api` folder for internal server calls. A folder name such as `api-reference` would be accepted. + + + +```json Navigation With Folder +"navigation": [ + { + "group": "Group Name", + "pages": ["your-folder/your-page"] + } +] +``` + +## Hidden Pages + +MDX files not included in `mint.json` will not show up in the sidebar but are accessible through the search bar and by linking directly to them. diff --git a/docs/essentials/settings.mdx b/docs/essentials/settings.mdx new file mode 100644 index 000000000..ae6e7d6ab --- /dev/null +++ b/docs/essentials/settings.mdx @@ -0,0 +1,318 @@ +--- +title: 'Global Settings' +description: 'Mintlify gives you complete control over the look and feel of your documentation using the mint.json file' +icon: 'gear' +--- + +Every Mintlify site needs a `mint.json` file with the core configuration settings. Learn more about the [properties](#properties) below. + +## Properties + + +Name of your project. Used for the global title. + +Example: `mintlify` + + + + + An array of groups with all the pages within that group + + + The name of the group. + + Example: `Settings` + + + + The relative paths to the markdown files that will serve as pages. 
+ + Example: `["customization", "page"]` + + + + + + + + Path to logo image or object with path to "light" and "dark" mode logo images + + + Path to the logo in light mode + + + Path to the logo in dark mode + + + Where clicking on the logo links you to + + + + + + Path to the favicon image + + + + Hex color codes for your global theme + + + The primary color. Used for most often for highlighted content, section + headers, accents, in light mode + + + The primary color for dark mode. Used for most often for highlighted + content, section headers, accents, in dark mode + + + The primary color for important buttons + + + The color of the background in both light and dark mode + + + The hex color code of the background in light mode + + + The hex color code of the background in dark mode + + + + + + + + Array of `name`s and `url`s of links you want to include in the topbar + + + The name of the button. + + Example: `Contact us` + + + The url once you click on the button. Example: `https://mintlify.com/contact` + + + + + + + + + Link shows a button. GitHub shows the repo information at the url provided including the number of GitHub stars. + + + If `link`: What the button links to. + + If `github`: Link to the repository to load GitHub information from. + + + Text inside the button. Only required if `type` is a `link`. + + + + + + + Array of version names. Only use this if you want to show different versions + of docs with a dropdown in the navigation bar. + + + + An array of the anchors, includes the `icon`, `color`, and `url`. + + + The [Font Awesome](https://fontawesome.com/search?s=brands%2Cduotone) icon used to feature the anchor. + + Example: `comments` + + + The name of the anchor label. + + Example: `Community` + + + The start of the URL that marks what pages go in the anchor. Generally, this is the name of the folder you put your pages in. + + + The hex color of the anchor icon background. 
Can also be a gradient if you pass an object with the properties `from` and `to` that are each a hex color. + + + Used if you want to hide an anchor until the correct docs version is selected. + + + Pass `true` if you want to hide the anchor until you directly link someone to docs inside it. + + + One of: "brands", "duotone", "light", "sharp-solid", "solid", or "thin" + + + + + + + Override the default configurations for the top-most anchor. + + + The name of the top-most anchor + + + Font Awesome icon. + + + One of: "brands", "duotone", "light", "sharp-solid", "solid", or "thin" + + + + + + An array of navigational tabs. + + + The name of the tab label. + + + The start of the URL that marks what pages go in the tab. Generally, this + is the name of the folder you put your pages in. + + + + + + Configuration for API settings. Learn more about API pages at [API Components](/api-playground/demo). + + + The base url for all API endpoints. If `baseUrl` is an array, it will enable for multiple base url + options that the user can toggle. + + + + + + The authentication strategy used for all API endpoints. + + + The name of the authentication parameter used in the API playground. + + If method is `basic`, the format should be `[usernameName]:[passwordName]` + + + The default value that's designed to be a prefix for the authentication input field. + + E.g. If an `inputPrefix` of `AuthKey` would inherit the default input result of the authentication field as `AuthKey`. + + + + + + Configurations for the API playground + + + + Whether the playground is showing, hidden, or only displaying the endpoint with no added user interactivity `simple` + + Learn more at the [playground guides](/api-playground/demo) + + + + + + Enabling this flag ensures that key ordering in OpenAPI pages matches the key ordering defined in the OpenAPI file. + + This behavior will soon be enabled by default, at which point this field will be deprecated. 
+ + + + + + + A string or an array of strings of URL(s) or relative path(s) pointing to your + OpenAPI file. + + Examples: + + ```json Absolute + "openapi": "https://example.com/openapi.json" + ``` + ```json Relative + "openapi": "/openapi.json" + ``` + ```json Multiple + "openapi": ["https://example.com/openapi1.json", "/openapi2.json", "/openapi3.json"] + ``` + + + + + + An object of social media accounts where the key:property pair represents the social media platform and the account url. + + Example: + ```json + { + "twitter": "https://twitter.com/mintlify", + "website": "https://mintlify.com" + } + ``` + + + One of the following values `website`, `facebook`, `twitter`, `discord`, `slack`, `github`, `linkedin`, `instagram`, `hacker-news` + + Example: `twitter` + + + The URL to the social platform. + + Example: `https://twitter.com/mintlify` + + + + + + Configurations to enable feedback buttons + + + + Enables a button to allow users to suggest edits via pull requests + + + Enables a button to allow users to raise an issue about the documentation + + + + + + Customize the dark mode toggle. + + + Set if you always want to show light or dark mode for new users. When not + set, we default to the same mode as the user's operating system. + + + Set to true to hide the dark/light mode toggle. You can combine `isHidden` with `default` to force your docs to only use light or dark mode. For example: + + + ```json Only Dark Mode + "modeToggle": { + "default": "dark", + "isHidden": true + } + ``` + + ```json Only Light Mode + "modeToggle": { + "default": "light", + "isHidden": true + } + ``` + + + + + + + + + A background image to be displayed behind every page. See example with + [Infisical](https://infisical.com/docs) and [FRPC](https://frpc.io). 
+ diff --git a/docs/favicon.png b/docs/favicon.png new file mode 100644 index 000000000..be0d22192 Binary files /dev/null and b/docs/favicon.png differ diff --git a/docs/images/analyze-book-types.png b/docs/images/analyze-book-types.png new file mode 100644 index 000000000..013df19ac Binary files /dev/null and b/docs/images/analyze-book-types.png differ diff --git a/docs/images/analyzebook/analyze-book-variant-full.png b/docs/images/analyzebook/analyze-book-variant-full.png new file mode 100644 index 000000000..5d6a08b3c Binary files /dev/null and b/docs/images/analyzebook/analyze-book-variant-full.png differ diff --git a/docs/images/analyzebook/booktypes.png b/docs/images/analyzebook/booktypes.png new file mode 100644 index 000000000..a98d9eaf6 Binary files /dev/null and b/docs/images/analyzebook/booktypes.png differ diff --git a/docs/images/analyzebook/method.png b/docs/images/analyzebook/method.png new file mode 100644 index 000000000..36e835cc3 Binary files /dev/null and b/docs/images/analyzebook/method.png differ diff --git a/docs/images/analyzebook/prompt-img.png b/docs/images/analyzebook/prompt-img.png new file mode 100644 index 000000000..1ddfe7bcc Binary files /dev/null and b/docs/images/analyzebook/prompt-img.png differ diff --git a/docs/images/basic-prompt.png b/docs/images/basic-prompt.png new file mode 100644 index 000000000..4cc9c0936 Binary files /dev/null and b/docs/images/basic-prompt.png differ diff --git a/docs/images/book-test-input-structure.png b/docs/images/book-test-input-structure.png new file mode 100644 index 000000000..4968c0fce Binary files /dev/null and b/docs/images/book-test-input-structure.png differ diff --git a/docs/images/book-types.png b/docs/images/book-types.png new file mode 100644 index 000000000..5a1832921 Binary files /dev/null and b/docs/images/book-types.png differ diff --git a/docs/images/checks-passed.png b/docs/images/checks-passed.png new file mode 100644 index 000000000..3303c7736 Binary files /dev/null and 
b/docs/images/checks-passed.png differ diff --git a/docs/images/classify-bool-variant.png b/docs/images/classify-bool-variant.png new file mode 100644 index 000000000..d2817c583 Binary files /dev/null and b/docs/images/classify-bool-variant.png differ diff --git a/docs/images/classify-sentiment-bool-llm-variant.png b/docs/images/classify-sentiment-bool-llm-variant.png new file mode 100644 index 000000000..7bba8821c Binary files /dev/null and b/docs/images/classify-sentiment-bool-llm-variant.png differ diff --git a/docs/images/control-space-suggestions.png b/docs/images/control-space-suggestions.png new file mode 100644 index 000000000..7a3ef934d Binary files /dev/null and b/docs/images/control-space-suggestions.png differ diff --git a/docs/images/dashboardtest1.png b/docs/images/dashboardtest1.png new file mode 100644 index 000000000..96c3fa07e Binary files /dev/null and b/docs/images/dashboardtest1.png differ diff --git a/docs/images/extension.png b/docs/images/extension.png new file mode 100644 index 000000000..edec4b919 Binary files /dev/null and b/docs/images/extension.png differ diff --git a/docs/images/extract-verbs/extract-verbs-nouns-example.png b/docs/images/extract-verbs/extract-verbs-nouns-example.png new file mode 100644 index 000000000..7a47c5e72 Binary files /dev/null and b/docs/images/extract-verbs/extract-verbs-nouns-example.png differ diff --git a/docs/images/extract-verbs/extract-verbs-prompt-dash.png b/docs/images/extract-verbs/extract-verbs-prompt-dash.png new file mode 100644 index 000000000..df22ee702 Binary files /dev/null and b/docs/images/extract-verbs/extract-verbs-prompt-dash.png differ diff --git a/docs/images/extract-verbs/stringify2.png b/docs/images/extract-verbs/stringify2.png new file mode 100644 index 000000000..8007c668f Binary files /dev/null and b/docs/images/extract-verbs/stringify2.png differ diff --git a/docs/images/extraction-tests.png b/docs/images/extraction-tests.png new file mode 100644 index 000000000..0df941545 Binary 
files /dev/null and b/docs/images/extraction-tests.png differ diff --git a/docs/images/fantasy-test.png b/docs/images/fantasy-test.png new file mode 100644 index 000000000..cff2d2818 Binary files /dev/null and b/docs/images/fantasy-test.png differ diff --git a/docs/images/function-classify-bool.png b/docs/images/function-classify-bool.png new file mode 100644 index 000000000..00a83ba8e Binary files /dev/null and b/docs/images/function-classify-bool.png differ diff --git a/docs/images/generated-hint.png b/docs/images/generated-hint.png new file mode 100644 index 000000000..770e4a7a6 Binary files /dev/null and b/docs/images/generated-hint.png differ diff --git a/docs/images/glooinit.png b/docs/images/glooinit.png new file mode 100644 index 000000000..01b36da21 Binary files /dev/null and b/docs/images/glooinit.png differ diff --git a/docs/images/hero-dark.svg b/docs/images/hero-dark.svg new file mode 100644 index 000000000..59ab09758 --- /dev/null +++ b/docs/images/hero-dark.svg @@ -0,0 +1,136 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/hero-light.svg b/docs/images/hero-light.svg new file mode 100644 index 000000000..9db54d9c0 --- /dev/null +++ b/docs/images/hero-light.svg @@ -0,0 +1,139 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/llm-clients.png b/docs/images/llm-clients.png new file mode 100644 index 000000000..e313abbc4 Binary files /dev/null and b/docs/images/llm-clients.png differ diff --git a/docs/images/prompt-author-method.png 
b/docs/images/prompt-author-method.png new file mode 100644 index 000000000..b8141389d Binary files /dev/null and b/docs/images/prompt-author-method.png differ diff --git a/docs/images/running-yourself.png b/docs/images/running-yourself.png new file mode 100644 index 000000000..68e43d6fd Binary files /dev/null and b/docs/images/running-yourself.png differ diff --git a/docs/images/sentiment-test-validate.png b/docs/images/sentiment-test-validate.png new file mode 100644 index 000000000..89160f326 Binary files /dev/null and b/docs/images/sentiment-test-validate.png differ diff --git a/docs/images/sentiment-tests-1.png b/docs/images/sentiment-tests-1.png new file mode 100644 index 000000000..a839cf9cf Binary files /dev/null and b/docs/images/sentiment-tests-1.png differ diff --git a/docs/images/sentiment-tests.png b/docs/images/sentiment-tests.png new file mode 100644 index 000000000..a0c0d44ef Binary files /dev/null and b/docs/images/sentiment-tests.png differ diff --git a/docs/images/test-group-validate.png b/docs/images/test-group-validate.png new file mode 100644 index 000000000..1abf51946 Binary files /dev/null and b/docs/images/test-group-validate.png differ diff --git a/docs/images/test-group1.png b/docs/images/test-group1.png new file mode 100644 index 000000000..4b9ec458b Binary files /dev/null and b/docs/images/test-group1.png differ diff --git a/docs/images/test-run-bool.png b/docs/images/test-run-bool.png new file mode 100644 index 000000000..b8736020f Binary files /dev/null and b/docs/images/test-run-bool.png differ diff --git a/docs/logo/dark.svg b/docs/logo/dark.svg new file mode 100644 index 000000000..db4cf22b0 --- /dev/null +++ b/docs/logo/dark.svg @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/logo/favicon.png b/docs/logo/favicon.png new file mode 100644 index 000000000..be0d22192 Binary files /dev/null and b/docs/logo/favicon.png differ diff --git a/docs/logo/light.svg b/docs/logo/light.svg new file mode 
100644 index 000000000..c569a6587 --- /dev/null +++ b/docs/logo/light.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/mdx/concepts.mdx b/docs/mdx/concepts.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/examples/chat-bot.mdx b/docs/mdx/examples/chat-bot.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/examples/classification.mdx b/docs/mdx/examples/classification.mdx new file mode 100644 index 000000000..f4f893063 --- /dev/null +++ b/docs/mdx/examples/classification.mdx @@ -0,0 +1,235 @@ +--- +title: "Classification" +--- + +# Use cases + +What types of problems are classification problems? +- Deciding which tools an AI agent should use +- Sentiment analysis +- Labeling emails +- Spam detection +- Customer Intent detection + +## Walkthrough - Creating a Chatbot that uses tools +Let's say we're writing an AI agent similar to Siri. We want to be able to: +- Book meetings +- Ask about our availability +- Set reminders + +To do this we need to decompose our Chatbot pipeline so that we always perform a classification step +to figure out what the chatbot should do. Splitting your LLM prompts into subtasks will make it easier to test changes over time. + +General chatbot architecture: + +User message -> Classify message -> Use tool, or respond back + +### Setup + +#### Define your classification function in Gloo configs +If you have the [VSCode extension](/mdx/installation#gloo-extension), **.gloo** files will have syntax highlighting! +``` gloo main.gloo +@function ClassifyMessage { + @input string + @output Category +} + +@enum Category { + BOOK_MEETING + AVALABILITY_QUERY + SET_REMINDER +} +``` + +#### Define an LLM variant +Now we have to define an `LLM variant` for that function, which defines how the function is executed. +In the future, you could replace this with a different model, instead of an LLM. + +An LLM variant has three things +1. [required] An LLM client +2. 
[required] A prompt +3. [optional] output schema descriptions (or `@stringify`) + +Feel free to add any of these objects in any .gloo file. All objects are imported globally and can be referenced from anywhere. + +```gloo main.gloo +@client[llm] GPT35Client { + @provider openai + model gpt-3.5-turbo + temperature 0 + api_key @ENV.OPENAI_API_KEY +} + +@variant[llm] v1 for ClassifyMessage { + @client[llm] GPT35Client + @prompt { + Given a message, classify it into one of the following categories: BOOK_MEETING, AVALABILITY_QUERY, SET_REMINDER + + Message: + --- + {@input} + --- + + Classification: + } +} + +``` + +Note how we can inject the function's **@input** variable into the prompt by writing `{@input}`. +If your **@input** was an object, you'd also be able to write `{@input.property}`. See our entity extraction. + +#### Run your function +Every time you save a .gloo file, the Gloo CLI will generate relevant python code under your desired output folder. + +Since our output folder is configured as "generated" in our gloo.yaml file in our project we can import from there. + +``` python app/pipeline.py +from generated.functions import ClassifyMessage +import asyncio + +async def call_sentiment_fn(): + res = await ClassifyMessage("v1", args="Can I schedule an meeting?") + print(res) + ## Response is typed as a Category + + +def call_sentiment_sync(): + asyncio.run(ClassifyMessage("v1", args="Can I schedule a meeting?")) + +``` + +### Improving our prompt with enum aliases and descriptions +You probably want to add some descriptions as to what each class actually means, or change the enum name that gets injected in the prompt for better results. + +In our case, we will rename all our classes to symbols, such as "K1, K2, K3" so that the LLM can focus on the class descriptions we provide more than the names of them. This is an interesting technique called "Symbol tuning". + +To do this we will leverage a unique property called **@stringify**. 
@stringify basically means "define how this class or object will be described to the LLM". + See below how it's used: + +```diff main.gloo +@variant[llm] v1 for ClassifyMessage { + @client[llm] GPT35Client ++ @stringify Category { ++ BOOK_MEETING ++ @rename{k1} ++ @describe{ ++ When the user wants to book a meeting on our internal system. ++ } ++ ++ AVAILABILITY_QUERY ++ @rename{k2} ++ @describe{ ++ When the user is asking for their calendar availability. ++ } ++ ++ SET_REMINDER ++ @rename{k3} ++ @describe{ ++ When the user wants to set a calendar reminder. ++ } ++ } + @prompt { + Given a message, classify it into one of the following categories: ++ {@Category.values} + + Message: + --- + {@input} + --- + + Classification: + } +} +``` +We added two things here: +1. **@stringify** definition for our **Category** enum, with the new aliases and descriptions. +2. Used Gloo's **\{@Category.values\}** unique enum property, which writes down each enum and its description in the prompt. You can only use `.values` on enums to list them in your prompt. + +**Resulting prompt** +``` +Given a message, classify it into one of the following categories: +k1: When the user wants to book a meeting on our internal system. +k2: When the user is asking for their calendar availability. +k3: When the user wants to set a calendar reminder. + +Message: +I want to set a reminder + +Classification: +``` + +Gloo will automatically parse the symbols you added back into each enum when you run your code. No need to change the rest of your code! + +### Final code + + +```gloo main.gloo +@function ClassifyMessage { + @input string + @output Category +} + +@enum Category { + BOOK_MEETING + AVAILABILITY_QUERY + SET_REMINDER +} + +@variant[llm] v1 for ClassifyMessage { + @client[llm] GPT35Client + @stringify Category { + BOOK_MEETING + @rename{k1} + @describe{ + When the user wants to book a meeting on our internal system. 
+ } + + AVAILABILITY_QUERY + @rename{k2} + @describe{ + When the user is asking for their calendar availability. + } + + SET_REMINDER + @rename{k3} + @describe{ + When the user wants to set a calendar reminder. + } + } + @prompt { + Given a message, classify it into one of the following categories: + {@Category.values} + + Message: + --- + {@input} + --- + + Classification: + } +} +``` + +```gloo clients.gloo +@client[llm] GPT35Client { + @provider openai + model gpt-3.5-turbo + temperature 0 + api_key @ENV.OPENAI_API_KEY +} +``` + +```python app/pipeline.py +from generated.functions import ClassifyMessage +import asyncio + +async def call_sentiment_fn(): + res = await ClassifyMessage("v1", args="Can I schedule an meeting?") + print(res) + ## Prints Category.BOOK_MEETING + +if __name__ == "__main__": + asyncio.run(call_sentiment_fn()) +``` + \ No newline at end of file diff --git a/docs/mdx/examples/extraction.mdx b/docs/mdx/examples/extraction.mdx new file mode 100644 index 000000000..22a7961fc --- /dev/null +++ b/docs/mdx/examples/extraction.mdx @@ -0,0 +1,419 @@ +--- +title: "Entity Extraction" +--- + +# Use cases + +What types of problems are entity extraction problems? +- Filling out a form with a chat bot +- Getting specific details out of long form text +- Calling a function in your code with some parameters after analyzing some text (like OpenAI Functions) + +## Walkthrough +We'll be continuing our example of a Siri-like agent from the [Classification](../classification) tutorial. + +As a reminder: +We want to be able to: +- Book meetings +- Ask about our availability +- Set reminders + +For this tutorial, we'll be focusing on booking meetings. + +### Setup +First we need to define our inputs and outputs. + +To do that, we'll first define our function: + +The function is named `ExtractMeetingDetails` and takes in a string and outputs a `MeetingDetails` object. 
+ +```gloo meetings.gloo +@function ExtractMeetingDetails { + @input string + @output MeetingDetails +} +``` + +Next we'll define our `MeetingDetails` type: +```gloo meetings.gloo +@class MeetingDetails { + title string + start_time string + end_time string + attendees string[] +} +``` + +### Prompt definition + +Now, we'll define how we call the LLM to extract the meeting details. + +We'll need to define a `@variant` of `ExtractMeetingDetails` that uses the `GPT35Client` to call the LLM. + +We'll add the variant directly to our `meetings.gloo` file and define the `GPT35Client` in `clients.gloo`. + + + +```gloo meetings.gloo +@variant[llm] v1 for ExtractMeetingDetails { + @client[llm] GPT35Client + @prompt { + Given the user message, extract relevant details. + + Message: {@input} + + Output JSON: + {@output.json} + + JSON: + } +} +``` +```gloo clients.gloo +@client[llm] GPT35Client { + @provider openai + model gpt-3.5-turbo + temperature 0 + api_key @ENV.OPENAI_API_KEY +} +``` + + +This prompt generates the following prompt: +``` +Given the user message, extract relevant details. + +Message: {@input} + +Output JSON: +{ + "title": string, + "start_time": string, + "end_time": string, + "attendees": string[] +} +``` + +### Using ExtractMeetingDetails in python +Now that we've defined our function, we can use it in any python code. + +```python main.py +import asyncio +from generated.function import ExtractMeetingDetails +from gloo_py.stringify import StringifyError + +async def main(): + message = 'Book a meeting with John at 3pm tomorrow.' + try: + meeting_details = await ExtractMeetingDetails('v1', message) + # Note that meeting_details is a MeetingDetails object. + print(meeting_details) + # Prints: + # MeetingDetails(title='Meeting with John', start_time='3pm', end_time='4pm', attendees=['John']) + except StringifyError: + # You can capture any parsing errors here. 
+ print('Failed to parse meeting details.') + +if __name__ == '__main__': + asyncio.run(main()) +``` + +### Using nested objects + +When it comes to adding more complex types, you can use nested objects. For example, for attendees, we may want to get the name and email of each attendee. +All we'd have to do is update our `MeetingDetails` class to include a nested `Attendee` class. However, we should ensure that the `email` field is optional +as the user may not always provide an email. + +```gloo meetings.gloo + +@class MeetingDetails { + title string + start_time string + end_time string + attendees Attendee[] +} + +@class Attendee { + name string + email string? +} +``` + +Now, our prompt would automatically update to: +``` +Given the user message, extract relevant details. + +Message: {@input} + +Output JSON: +{ + "title": string, + "start_time": string, + "end_time": string, + "attendees": { + "name": string, + "email": string | null + }[] +} +``` + +### Getting ISO dates +In order to call the google calendar API, we can't just get '3pm' as the start time. We need to get an ISO date string, and it needs to know what today is. + +We'll do this in two different ways: +#### Option 1. Complex input type +We can simply change the input type of our function to be a `MeetingRequest` object. + +```gloo meetings.gloo +@class MeetingRequest { + message string + today string +} + +@function ExtractMeetingDetails { + @input MeetingRequest + @output MeetingDetails +} +``` + +Then we'd update the variant to use the `message` and `today` fields. + +```gloo meetings.gloo +@variant[llm] v1 for ExtractMeetingDetails { + @client[llm] GPT35Client + @prompt { + Given the user message, extract relevant details. + + Message: {@input.message} + + Today: {@input.today} + + Output JSON: + {@output.json} + + JSON: + } +} +``` + +Note that you're able to access nested fields using the dot notation. 
+ +The generated prompt here would be: +``` +Given the user message, extract relevant details. + +Message: {@input.message} + +Today: {@input.today} + +Output JSON: +{ + "title": string, + "start_time": string, + "end_time": string, + "attendees": { + "name": string, + "email": string | null + }[] +} +``` + +To use the function, I would simply do: +```python main.py +from datetime import datetime +from generated.functions import ExtractMeetingDetails +from generated.custom_types import MeetingRequest + + # ... + meeting_details = await ExtractMeetingDetails('v1', MeetingRequest( + message='Book a meeting with John at 3pm tomorrow.', + today=datetime.now().isoformat() + )) +``` + +However this still may not be enough. What if we want to get the start time and end time as ISO dates as well? +```diff meeting.gloo + @variant[llm] v1 for ExtractMeetingDetails { + @client[llm] GPT35Client ++ @stringify MeetingDetails { ++ start_time @describe{ISO date string} ++ end_time @describe{ISO date string} ++ } +... +} +``` + +This would generate the following prompt: +```diff + Given the user message, extract relevant details. + + Message: {@input.message} + + Today: {@input.today} + + Output JSON: + { + "title": string, +- "start_time": string, +- "end_time": string, ++ "start_time": ISO date string, ++ "end_time": ISO date string, + "attendees": { + "name": string, + "email": string | null + }[] + } +``` + +#### Option 2. Using custom_vars +Another option is to use custom variables directly in the prompt. + +```diff meetings.gloo +@function ExtractMeetingDetails { + @input string + @output MeetingDetails +} + +@variant[llm] v1 for ExtractMeetingDetails { + @client[llm] GPT35Client + ++ @method custom_vars { ++ @lang[py] { ++ from datetime import datetime ++ ++ def custom_vars() -> typing.Dict[str, str]: ++ return { ++ 'today': datetime.now().isoformat() ++ } ++ } ++ } + + @prompt { + Given the user message, extract relevant details. 
+ + Message: {@input} + ++ Today: {@today} + + Output JSON: + {@output.json} + + JSON: + } +} +``` + +Now, you no longer have to change the caller code, and you can use the function like this: +```python main.py +from generated.functions import ExtractMeetingDetails + + # ... + meeting_details = await ExtractMeetingDetails('v1', + 'Book a meeting with John at 3pm tomorrow.') +``` + +## Final code + + +```python app/pipeline.py +import asyncio +# ClassifyMessage and Category are from the classification tutorial. +from generated.function import ExtractMeetingDetails, ClassifyMessage +from generated.custom_types import Category +from gloo_py.stringify import StringifyError + +async def handle_meeting(message: str): + try: + meeting_details = await ExtractMeetingDetails('v1', message) + except StringifyError: + # You can capture any parsing errors here. + print('Failed to parse meeting details.') + return + + # Note that meeting_details is a MeetingDetails object. + print(meeting_details) + # Prints: + # MeetingDetails( + # title='Meeting with John', + # start_time='', + # end_time='', + # attendees=[Attendee(name='John', email=None)] + #) + + # Write code to call some API to book a meeting with + # meeting_details. + + +async def main(): + message = 'Book a meeting with John at 3pm tomorrow.' + + category = await ClassifyMessage('v1', message) + + if category == Category.MEETING: + await handle_meeting(message) + elif category == Category.AVAILABILITY: + # ... + elif category == Category.REMINDER: + # ... 
+ +if __name__ == '__main__': + asyncio.run(main()) +``` + +```gloo meetings.gloo +@class MeetingDetails { + title string + start_time string + end_time string + attendees Attendee[] +} + +@class Attendee { + name string + email string +} + +@function ExtractMeetingDetails { + @input string + @output MeetingDetails +} + +@variant[llm] v1 for ExtractMeetingDetails { + @client[llm] GPT35Client + + @method custom_vars { + @lang[py] { + from datetime import datetime + + def custom_vars() -> typing.Dict[str, str]: + return { + 'today': datetime.now().isoformat() + } + } + } + + @prompt { + Given the user message, extract relevant details. + + Message: {@input} + + Today: {@today} + + Output JSON: + {@output.json} + + JSON: + } +} +``` + +```gloo clients.gloo +@client[llm] GPT35Client { + @provider openai + model gpt-3.5-turbo + temperature 0 + api_key @ENV.OPENAI_API_KEY +} +``` + + \ No newline at end of file diff --git a/docs/mdx/examples/q-and-a.mdx b/docs/mdx/examples/q-and-a.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/examples/summarization.mdx b/docs/mdx/examples/summarization.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/examples/translation.mdx b/docs/mdx/examples/translation.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/installation.mdx b/docs/mdx/installation.mdx new file mode 100644 index 000000000..9e99d3482 --- /dev/null +++ b/docs/mdx/installation.mdx @@ -0,0 +1,82 @@ +--- +title: "Installation" +--- + +## Gloo Compiler + +You'll need to install the gloo compiler -- `gloo` -- to your path. 
+ + + + +```bash Mac +brew tap gloohq/gloo +brew install gloo +``` + + +```bash Linux (debian) +curl -fsSL https://raw.githubusercontent.com/GlooHQ/homebrew-gloo/main/install-gloo.sh | bash +``` + +```bash Windows (via scoop) +# You can install scoop at https://scoop.sh/ +scoop bucket add gloo-bucket https://github.com/gloohq/homebrew-gloo +scoop install gloo +``` + + +### Update instructions + + + +```bash Mac +gloo update +``` + + +```bash Linux (debian) +curl -fsSL https://raw.githubusercontent.com/GlooHQ/homebrew-gloo/main/install-gloo.sh | bash +``` + +```bash Windows (via scoop) +scoop update +scoop update gloo +``` + + + +## Gloo Extension + +Use our [VSCode Extension](https://marketplace.visualstudio.com/items?itemName=gloo.gloo) for syntax highlighting, auto building `.gloo` files, and inline syntax errors. + +It also works with [Cursor](https://cursor.so/)! + +If you don't use the VSCode extension, you'll need to run: `gloo build` (no args) every time you edit a `.gloo` file. + + + +### Initialize Gloo in your project + +``` +gloo init +``` + +This will create a gloo.yaml file + +![](/images/glooinit.png) + +### Folder structure + +``` +. +|-- gloo.yaml +├── app +│ └── your application files +├── gloo +│ └── your gloo files live here +├── pyproject.toml +└── generated + └── translated, runnable python code from .gloo. + Don't edit these files. +``` \ No newline at end of file diff --git a/docs/mdx/overview.mdx b/docs/mdx/overview.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/quickstart.mdx b/docs/mdx/quickstart.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/ref/class.mdx b/docs/mdx/ref/class.mdx new file mode 100644 index 000000000..7921c2258 --- /dev/null +++ b/docs/mdx/ref/class.mdx @@ -0,0 +1,109 @@ +--- +title: "@class" +--- + + +Classes consist of a name, and a list of properties and their types. + + +```gloo Gloo +@class Foo { + property1 string + property2 int? 
+ property3 Bar[] + property4 MyEnum +} +``` + +```python Python Equivalent +from pydantic import BaseModel +from path.to.bar import Bar +from path.to.my_enum import MyEnum + +class Foo(BaseModel): + property1: str + property2: Optional[int]= None + property3: List[Bar] + property4: MyEnum +``` + +```Typescript Typescript Equivalent +import z from 'zod'; +import { BarZod } from './path/to/bar'; +import { MyEnumZod } from './path/to/my_enum'; + +const FooZod = z.object({ + property1: z.string(), + property2: z.number().int().nullable().optional(), + property3: z.array(BarZod), + property4: MyEnumZod, +}); + +type Foo = z.infer; +``` + + +## Inheritance +Not supported yet + +## Properties +Classes may have any number of properties. +Property names must follow: +1. Must start with a letter +2. Must contain only letters, numbers, and underscores +3. Must be unique within the class + +The type of a property can be any [supported type](/type) + +### Default values +Properties don't yet support default values. + +We're proposing different options, please let us know which you prefer! + + +```gloo Proposed Syntax - 1 +@class Foo { + property1 string + property2 int?= null + property3 Bar[]= [] + property4 MyEnum= MyEnum.A +} +``` + +```gloo Proposed Syntax - 2 +@class Foo { + property1 string + property2 int? 
@default{null} + property3 Bar[] @default{[]} + property4 MyEnum @default{MyEnum.A} +} +``` + + +```python Python Equivalent +from pydantic import BaseModel +from path.to.bar import Bar +from path.to.my_enum import MyEnum + +class Foo(BaseModel): + property1: str + property2: Optional[int] = None + property3: List[Bar] = [] + property4: MyEnum = MyEnum.A +``` + +```Typescript Typescript Equivalent +import z from 'zod'; +import { BarZod } from './path/to/bar'; +import { MyEnumZod, MyEnum } from './path/to/my_enum'; + +const FooZod = z.object({ + property1: z.string(), + property2: z.number().int().nullable().optional().default(null), + property3: z.array(BarZod).default([]), + property4: MyEnumZod.default(MyEnum.A), +}); + +type Foo = z.infer; +``` + \ No newline at end of file diff --git a/docs/mdx/ref/client-code.mdx b/docs/mdx/ref/client-code.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/ref/client-llm.mdx b/docs/mdx/ref/client-llm.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/ref/client-pmodel.mdx b/docs/mdx/ref/client-pmodel.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/ref/enum.mdx b/docs/mdx/ref/enum.mdx new file mode 100644 index 000000000..f0866e10f --- /dev/null +++ b/docs/mdx/ref/enum.mdx @@ -0,0 +1,34 @@ +--- +title: "@enum" +--- + +To define your own custom enum in Gloo: + + +```gloo Gloo +@enum MyEnum { + Value1 + Value2 + Value3 +} +``` + +```python Python Equivalent +from enum import StrEnum + +class MyEnum(StrEnum): + Value1 = "Value1" + Value2 = "Value2" + Value3 = "Value3" +``` + +```Typescript Typescript Equivalent +enum MyEnum { + Value1 = "Value1", + Value2 = "Value2", + Value3 = "Value3", +} +``` + + +You may have as many values as you'd like. Values may not be duplicated or empty. Values may not contain spaces or special characters and must not start with a number. 
\ No newline at end of file diff --git a/docs/mdx/ref/function.mdx b/docs/mdx/ref/function.mdx new file mode 100644 index 000000000..cc7d2584b --- /dev/null +++ b/docs/mdx/ref/function.mdx @@ -0,0 +1,39 @@ +--- +title: "@function" +--- + +A function is a definition of an input and output. + +The implementation of the function is described in a [variant](/mdx/ref/variant-llm). + +We specifically separate out functions and their definitions because in machine learning, +it's often common to swap out model parameters, what model architecture is used, or even +if you're using an LLM vs a Heuristic vs a combination of both to get the answer. + +With gloo, you can swap out the function definition and keep the rest of the code that uses the +function the same. + + +```gloo Gloo +@function GetSentiment { + @input string + @output string +} +``` + +```python Python Equivalent +from typing import Callable + +GetSentiment = Callable[[str], str] +``` + +```typescript Typescript Equivalent +type GetSentiment = (str: string) => string; +``` + + +## Properties +| Property | Type | Description | Required | +| -------- | ---- | ----------- | -------- | +| `@input` | Any [type](/docs/types) | The input type of the function | YES | +| `@output` | Any [type](/docs/types) | The output type of the function | YES| \ No newline at end of file diff --git a/docs/mdx/ref/method.mdx b/docs/mdx/ref/method.mdx new file mode 100644 index 000000000..4a1ad558e --- /dev/null +++ b/docs/mdx/ref/method.mdx @@ -0,0 +1,157 @@ +--- +title: "@method" +--- + +`@method` is used to inject function code. They are an advanced concept that should be used +after you are comfortable with the basics of Gloo. 
+ +This is available on the following: +* `@class` +* `@test_group` + * `@case` +* `@variant[llm]` +* `@variant[code]` + + +## Supported Languages + +| Language | Tag | Supported | +| -------- | --- | --------- | +| python | `@lang[py]` | 3.10+ | + +## Syntax guide +```gloo Syntax +@method { + @lang[] { + + } +} +``` + +### @class + +When used inside `@class`, methods are properties to a class. + + +```gloo @class +@class Foo { + costs float[] + + @method adder { + @lang[py] { + @property + def adder(self) -> float: + # Note you have access to self and all the properties of Foo + return sum(self.costs) + } + } +} +``` +```python Python Equivalent +from pydantic import BaseModel +from typing import List + +class Foo(BaseModel): + costs: List[float] + + @property + def adder(self) -> float: + # Note you have access to self and all the properties of Foo + return sum(self.costs) +``` + +```typescript Typescript Equivalent +// But now no Zod (so parsing is not as convenient). +class Foo { + costs: number[]; + + get adder(): number { + // Note you have access to this and all the properties of Foo + return this.costs.reduce((a, b) => a + b, 0) + } +} +``` + + +### @test_group +```gloo @test_group +@test_group Group1 for GetSetiment { + @method assert1 { + @lang[py] { + # In a @test_group, the parameters are always the same: + # 1. The input type + # 2. The output type + # Gloo will automatically generate the correct type for you + # based on the @input and @output of the function + # (GetSentiment) + def adder1(arg: InputType, output: OutputType) -> None: + assert output == OutputType.POSITIVE + } + } + ... +} +``` + + +#### @case +```gloo @case +@test_group Group1 for GetSetiment { + @case { + @input "I'm so happy today!" + @method assert1 { + @lang[py] { + # @test_case methods work exactly like @test_group methods + # except they only run for the specific test case. 
+ def adder1(arg: InputType, output: OutputType) -> None: + assert output == OutputType.POSITIVE + } + } + } + @input ... +} +``` + +### @variant[llm] +```gloo @variant[llm] +@variant[llm] v1 for GetSentiment { + @method custom_vars { + @lang[py] { + # custom_vars is a special method that allows you to inject + # variables into the prompt. This is useful for things like + # adding non-input values (e.g.today's date or example responses) + # that are only scoped to the implementation of this LLM. + def custom_vars() -> typing.Dict[str, str]: + return { + "some_var": "my substitution value" + } + } + } + @prompt { + ... + Uses of {@some_var} will be replaced with "my substitution value". + ... + } +} +``` + +### @variant[code] +```gloo @variant[code] +@variant[code] v2 for GetSentiment { + @method impl { + @lang[py] { + # impl is a special method that allows you to inject + # code into the implementation of this variant. + async def impl(arg: InputType) -> OutputType: + # Note that you have access to the input type and output type + # of the variant. + from generated.functions import GetSentiment + + # Conditionally call the LLM, only if you don't have context. + if 'yes' in arg: + return OutputType.POSITIVE + + # Call the LLM Variant + return await GetSentiment('v1', arg) + } + } +} diff --git a/docs/mdx/ref/test-group.mdx b/docs/mdx/ref/test-group.mdx new file mode 100644 index 000000000..e06c00551 --- /dev/null +++ b/docs/mdx/ref/test-group.mdx @@ -0,0 +1,192 @@ +--- +title: '@test_group' +--- + +Test groups offer a way to create tests for [@function](/mdx/ref/function) definitions that test every [@variant](/mdx/ref/variant-llm). + +Lets assume you have a function called `GetSentiment` with two different variants `v1` and `v2` (each variant being a unique prompt). In gloo, you'll automatically get tests created for each variant you define with no extra work. 
+
+
+```gloo Gloo
+@test_group GroupName for GetSentiment {
+ @input Some string goes here
+ @input {
+ Some multiline string
+ goes here.
+ }
+}
+```
+
+```python Python Equivalent (pytest)
+from textwrap import dedent
+from path.to.GetSentiment import GetSentimentV1, GetSentimentV2
+
+@parametrize("version", ["v1", "v2"])
+class TestGroupName:
+ async def test_case_0(self, version):
+ arg = "Some string goes here"
+ if version == "v1":
+ await GetSentimentV1(arg)
+ elif version == "v2":
+ await GetSentimentV2(arg)
+
+ async def test_case_1(self):
+ arg = dedent("""\
+ Some multiline string
+ goes here.""")
+ if version == "v1":
+ await GetSentimentV1(arg)
+ elif version == "v2":
+ await GetSentimentV2(arg)
+```
+
+```typescript TypeScript Equivalent (jest)
+import { GetSentimentV1, GetSentimentV2 } from 'path/to/GetSentiment';
+
+describe('GroupName', () => {
+ test.each(['v1', 'v2'])('test_case_0 - %s', async (version) => {
+ const arg = "Some string goes here";
+ if (version === 'v1') {
+ await GetSentimentV1(arg);
+ } else if (version === 'v2') {
+ await GetSentimentV2(arg);
+ }
+ });
+
+ test.each(['v1', 'v2'])('test_case_1 - %s', async (version) => {
+ const arg = ```Some multiline string
+ goes here.```;
+ if (version === 'v1') {
+ await GetSentimentV1(arg);
+ } else if (version === 'v2') {
+ await GetSentimentV2(arg);
+ }
+ });
+});
+```
+
+
+## Properties
+`@test_group` names must be unique per function, but can be reused across multiple functions.
+
+### @input
+
+`@input` is used to specify an input value per test case.
+A single `@test_group` can have multiple `@input` properties.
+
+#### @case
+
+Anytime you wish to add properties on a per test case basis, you can use the `@case` decorator.
+This can include things like naming a test case, or adding a custom assert method for that test case.
+
+#### @case names
+If you'd like to name an individual test case, you can use the `@case` decorator.
+ + +```gloo Gloo +@test_group GroupName for GetSentiment { + @case SpecialTestCase { + @input Some string goes here + } + @input { + Some multiline string + goes here. + } +} +``` + +```python Python Equivalent (pytest) +from textwrap import dedent +from path.to.GetSentiment import GetSentimentV1, GetSentimentV2 + +@parametrize("version", ["v1", "v2"]) +class TestGroupName: + async def test_SpecialTestCase(self, version): + arg = "Some string goes here" + if version == "v1": + await GetSentimentV1(arg) + elif version == "v2": + await GetSentimentV2(arg) + + async def test_case_1(self): + arg = dedent("""\ + Some multiline string + goes here.""") + if version == "v1": + await GetSentimentV1(arg) + elif version == "v2": + await GetSentimentV2(arg) +``` + +```typescript TypeScript Equivalent (jest) +import { GetSentimentV1, GetSentimentV2 } from 'path/to/GetSentiment'; + +describe('GroupName', () => { + test.each(['v1', 'v2'])('SpecialTestCase - %s', async (version) => { + const arg = "Some string goes here"; + if (version === 'v1') { + await GetSentimentV1(arg); + } else if (version === 'v2') { + await GetSentimentV2(arg); + } + }); + + test.each(['v1', 'v2'])('test_case_1 - %s', async (version) => { + const arg = ```Some multiline string + goes here.```; + if (version === 'v1') { + await GetSentimentV1(arg); + } else if (version === 'v2') { + await GetSentimentV2(arg); + } + }); +}); +``` + + + +### @method (custom asserts) + +To add an assert for your test case, you can use `@method`. + +You can read more about [custom asserts here](/mdx/ref/method). + +Gloo will run that assert on every test case in that `@test_group`. + +You don't have to call the function, just define it. Gloo will call it for you after each function call. +```gloo Gloo +@test_group GroupName for GetSentiment { + @method assert1 { + @lang[py] { + def assert1(arg: str, result: Sentiment): + assert result == Sentiment.POSITIVE + } + } + ... 
+} +``` + + +Instead of `def assert1(arg: str, result: Sentiment):` you can use `def assert1(arg: InputType, result: OutputType):`. Gloo defines `InputType` and `OutputType` in the scope of every `@method` for a `@test_group` + +#### @case asserts +For an assert on a single test case, you can use `@method` scoped to `@case`. + + +```gloo Gloo +@test_group GroupName for GetSentiment { + @case SpecialTestCase { + @method assert1 { + @lang[py] { + def assert1(arg: str, result: Sentiment): + assert result == Sentiment.POSITIVE + } + } + @input "Some string goes here" + } + @input "Some other string goes here" +} +``` + + +## Complex input values diff --git a/docs/mdx/ref/type.mdx b/docs/mdx/ref/type.mdx new file mode 100644 index 000000000..20f5b7724 --- /dev/null +++ b/docs/mdx/ref/type.mdx @@ -0,0 +1,100 @@ +--- +title: "Supported Types" +--- + +## Primitives + +| Type | Since | | +| ----------- | ------------------------ | ---------------------- | +| null | `gloo>=0.2.0` | | +| bool | `gloo>=0.2.0` | | +| int | `gloo>=0.2.0` | | +| float | `gloo>=0.2.0` | | +| string | `gloo>=0.2.0` | | +| char | `gloo>=0.2.0` | | + +## Constructed Types + +| Type | Since | Syntax | Usage | +| ----------- | ------------------------ | ---------------------- | ---- | +| Enum | `gloo>=0.2.0` | See [enum](enum) Docs +| Custom Classes | `gloo>=0.2.0` | See [class](class) Docs +| Optional | `gloo>=0.2.0` | `type?` | `int?` | +| Union | `gloo>=0.2.0` | `type\|type\|` | `int\|MyClass\|` | +| List | `gloo>=0.2.0` | `type[]` | `string[]` | +| Dictionary | | | +| Set | | | +| Tuple | | | + + +`(` and `)` are not yet supported for complex inline type. + +Constructed types can be composed together as well. + +Here are some examples and what their equivalents are in different languages. 
+ +### Example 1 + + +```gloo Gloo +int?|string[]|MyClass| +``` + +```python Python Equivalent +Union[Optional[int], List[str], MyClass] +``` + +```typescript TypeScript Equivalent +(number | undefined) | string[] | MyClass +``` + + + +### Example 2 + +```gloo Gloo +char[] +``` + +```python Python Equivalent +List[str] +``` + +```typescript TypeScript Equivalent +string[] +``` + + + +### Example 3 + + +```gloo Gloo +int|float|[] +``` +```python Python Equivalent +List[Union[int, float]] +``` + +```typescript TypeScript Equivalent +number[] +``` + + + + +### Example 4 + + +```gloo Gloo +int?|string[]|MyClass|[]? +``` + +```python Python Equivalent +Optional[List[Union[Optional[int], List[str], MyClass]]] +``` + +```typescript TypeScript Equivalent +((number | undefined) | string[] | MyClass)[] | undefined +``` + diff --git a/docs/mdx/ref/variant-code.mdx b/docs/mdx/ref/variant-code.mdx new file mode 100644 index 000000000..379a6a31a --- /dev/null +++ b/docs/mdx/ref/variant-code.mdx @@ -0,0 +1,24 @@ +--- +title: "@variant[code]" +--- + +This is an advanced feature. We caution against using it. + +This a way to use a heuristic inside of your pipelines. If you feel this is the +right choice, please reach out and ask first! You can read more about them [here](/mdx/ref/method#variant-code). + +```gloo +@variant[code] my_code_example for GetSentiment { + @method impl { + @lang[py] { + async def impl(arg: InputType) -> OutputType: + # Some implementation + } + } +} +``` + +The `impl` method is a special method that is used to implement the heuristic. +You can import any library or run any code you want inside of it. The only +requirement is that it must take in the `InputType` and return the `OutputType` +as defined by `GetSentiment`. 
\ No newline at end of file diff --git a/docs/mdx/ref/variant-llm.mdx b/docs/mdx/ref/variant-llm.mdx new file mode 100644 index 000000000..cb9258fc4 --- /dev/null +++ b/docs/mdx/ref/variant-llm.mdx @@ -0,0 +1,374 @@ +--- +title: "@variant[llm]" +--- + +A `@variant[llm]` is an implementation of a `@function` which uses an LLM. Gloo automatically provides: + +1. **A type-safe API for your LLM** -- we handle hallucinations and parsing for you +2. **Database safety** -- `@stringify` allows migrate llm specific text to your well-defined `@output` type +3. **Compile time checks for prompts** +4. **Organization** -- everything (configs, variables, renames) related to each prompt are defined and visible in one place +5. **Observability** -- everything is tracked and visible on the dashboard + + +```gloo Gloo +@variant[llm] v1 for GetSentiment { + @client[llm] GPT35Client + @prompt { + Given a sentence, return the sentiment of the sentence. + + {@input} + + Sentiments: + {@Sentiment.values} + + Sentiment: + } + +} + +@function GetSentiment { +@input string +@output Sentiment +} + +@enum Sentiment { +Positive +Negative +Neutral +} + +@client[llm] GPT35Client { +@provider openai +model gpt-3.5-turbo +temperature 0 +} + +```` + +```python Python Equivalent +from pydantic import BaseModel +from enum import Enum +import openai +from textwrap import dedent + +class Sentiment(str, Enum): + Positive = "Positive" + Negative = "Negative" + Neutral = "Neutral" + +# The gloo generated version additionally includes +# logging and error handling for when the LLM hallucinates +def GetSentiment(arg: str) -> Sentiment: + response = openai.Completion.create( + engine="gpt-3.5-turbo", + prompt=dedent(f"""\ + Given a sentence, return the sentiment of the sentence. 
+ + {arg} + + Sentiments: + Positive + Negative + Neutral + + Sentiment: + """).strip(), + temperature=0, + ) + + sentiment = response.choices[0].text.strip() + + # This might crash if sentiment is not a valid Sentiment + return Sentiment(sentiment) +```` + + + +## Type Safety (error handling) + +Gloo uses our `Forgiving Parser` to help extract data as much as possible. + +Instead of directly passing the results of the LLM to pydantic or zod, we do a first +pass to help massage the data. We'll outline some examples where this is useful. + +| LLM Output | Desired Type | Gloo Output | How | +| -------------------------- | ------------ | ---------------------- | ------------------------------------------------------------------------------------------ | +| positive | Sentiment | `Sentiment.Positive` | We handle case insensitivity | +| \{ "feeling": "positive"\} | Sentiment | `Sentiment.Positive` | When looking for a singular value, we can parse dictionaries of 1 keys as singular objects | +| positive | Sentiment[] | `[Sentiment.Positive]` | None array types are automatically converted | + +This applies not just to `@enum` types, but all types ([See types](/mdx/ref/type)). + +## Injecting inputs into the prompt + +### @input + +`@input` allows you to pass in the function's input text (or object) to the LLM prompt + +#### How @input behaves if it's a string or enum + +Using \{@input\} in your prompt is the equivalent of `str(arg)`. + +#### How @input behaves if it's a custom type + +Sometimes you might want to have your function take in an object. In this case, adding `{@input}` to your prompt is still used to access `str(your-object)`, but you can inject individual properties like this: + +`@input.property1` to inject `str(arg.property1)` + +This can be done with class `@method` as well ([See Methods](/mdx/ref/method#class)), with `@input.methodName`. + +### Formatting the @output + +The @output is the function's return type. 
+ +Using `{@output.json}` in the prompt is used to print out the JSON schema of the output type in your prompt. +Gloo automatically generates a JSON schema for you, but you can customize it with `@stringify`. + +#### @stringify - Add a custom output schema description + +In LLMs it is often useful to rename or provide descriptions about the task you are attempting to do. For example, you may want to rename a property from `duration` to `duration_in_minutes` so the LLM can understand what value it should output better. + +`@stringify` allows you to change the default JSON schema of any property or sub-property of your output type. + +You can use it to change the `@output.json` from being serialized as: + +``` +{ + "duration": 10 +} +``` + +to + +```json +{ + "duration_in_minutes": 10 +} +``` + +without having to change your code or the actual type definition. + +`{@output.json}` will automatically parse all your @stringify'd properties and replace the default JSON schema with your own custom aliases and descriptions of each field. + +#### @stringify -- Enums + +You can use `@EnumName.values` to automatically inject all the `value: description` pairs of your enums. +For values without a description, only `value` will be injected. + +To modify the description of each value, see the examples below. + + +```gloo Ex 1: describe +@enum Sentiment { + Positive + Negative + Neutral +} +@variant[llm] v1 for GetSentiment { + .. + @stringify Sentiment { + Positive + @describe{When the sentence is about good things that happened recently} + + Negative + @describe{When the sentence is about bad things that happened recently} + + Neutral + @describe{ + Sentences that are neutral or happend too long ago to be relevant + } + } + +} + +```` + +```gloo Ex 1: Prompt +Given a sentence, return the sentiment of the sentence. 
+ +{@input} + +Sentiments: +Positive: When the sentence is about good things that happened recently +Negative: When the sentence is about bad things that happened recently +Neutral: Sentences that are neutral or happend too long ago to be relevant + +Sentiment: +```` + +```gloo Ex 2: Rename +# In this example, we rename what we call each `Sentiment` value, but still +# guarantee the `Sentiment` enum as an output (thanks to Gloo's parser). +@variant[llm] v2 for GetSentiment { + ... + @stringify Sentiment { + Positive @rename{Good} + @describe{When the sentence is about good things that happened recently} + + Negative @rename{Bad} + @describe{When the sentence is about bad things that happened recently} + + Neutral + @describe{Sentences that are neutral or happend too long ago to be relevant} + } +} +``` + +```gloo Ex 2: Prompt +Given a sentence, return the sentiment of the sentence. + +{@input} + +Sentiments: +Good: When the sentence is about good things that happened recently +Bad: When the sentence is about bad things that happened recently +Neutral: Sentences that are neutral or happend too long ago to be relevant + +Sentiment: +``` + +```gloo Ex 3: Skip +# You can also skip any values you don't want to parse. +@variant[llm] v2 for GetSentiment { + ... + @stringify Sentiment { + Positive @rename{Good} + @describe{When the sentence is about good things that happened recently} + + Negative @rename{Bad} + @describe{When the sentence is about bad things that happened recently} + + Neutral @rename{Neither} @skip + @describe{ + Sentences that are neutral or happend too long ago to be relevant + } + } +} +``` + +```gloo Ex 3: Prompt +Given a sentence, return the sentiment of the sentence. 
+ +{@input} + +Sentiments: +Good: When the sentence is about good things that happened recently +Bad: When the sentence is about bad things that happened recently + +Sentiment: +``` + + + +#### @stringify -- Class + +Just like enums, you have access to `@describe` and `@rename` for class properties. `@skip` is not avaiable. + +There is a special method you can use to represent the class as a string: `@ClassName.json`. + +In all of the examples below, the output of the function will still be the type: `Person[]`. + +```gloo +@class Person { + name string + age int +} + +@function GetPerson { + @input string + @output Person[] +} +``` + + + +```gloo Ex 1: Basic +@variant[llm] v1 for GetPerson { + @prompt { + Given the sentence, return all the details about all people detected. + + {@input} + + Output JSON: + {@output.json} + + JSON: + } +} +``` + +```gloo Ex 1: Prompt +Given the sentence, return all the details about all people detected. + +{@input} + +Output JSON: +{ "name": string, "age": int }[] + +JSON: +``` + +```gloo Ex 2: Describe +# In this example, we'll add a description to get more details. +@variant[llm] v2 for GetPerson { + @stringify Person { + age @describe{in months} + } + + @prompt { + Given the sentence, return all the details about all people detected. + + {@input} + + Output JSON: + {@output.json} + + JSON: + } +} +``` + +```gloo Ex 2: Prompt +Given the sentence, return all the details about all people detected. + +{@input} + +Output JSON: +{ "name": string, "age": age in months }[] + +JSON: +``` + +```gloo Ex 3: Rename +# In this example, we'll instead rename the property to `age_in_months`. +@variant[llm] v3 for GetPerson { + @stringify Person { + age @rename{age_in_months} + } + + @prompt { + Given the sentence, return all the details about all people detected. + + {@input} + + Output JSON: + {@output.json} + + JSON: + } +} +``` + +```gloo Ex 3: Prompt +Given the sentence, return all the details about all people detected. 
+ +{@input} + +Output JSON: +{ "name": string, "age_in_months": int }[] + +JSON: +``` + + diff --git a/docs/mdx/ref/variant-pmodel.mdx b/docs/mdx/ref/variant-pmodel.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/running-tests.mdx b/docs/mdx/running-tests.mdx new file mode 100644 index 000000000..9c33aff34 --- /dev/null +++ b/docs/mdx/running-tests.mdx @@ -0,0 +1,41 @@ +--- +title: "Running Tests" +--- + +To run tests on gloo, you can run pytest. + +You can read more about the `-k` arg of pytest here ([PyTest Docs](https://docs.pytest.org/en/latest/example/markers.html#using-k-expr-to-select-tests-based-on-their-name)) + +```bash +# From your project root +# Runs all tests generated from @test_group +# For every function, for every variant +pytest -m gloo_test +``` + +To run tests for just one function +```bash +# From your project root +# Note the underscore at the end of the folder name +pytest -m gloo_test ./generated/[function_name]_ +``` + +To run tests for just one test group +```bash +# From your project root +pytest -m gloo_test -k [test_group_name] +``` + +To run a specific test case in a test group +```bash +# From your project root +pytest -m gloo_test -k '[test_group_name] and [test_case_name]' +``` + +## Coming Soon +```bash +# From anywhere within your project +gloo test --function [function_name] +gloo test --group [test_group_name] +gloo test --group [test_group_name] --case [test_case_name] +``` \ No newline at end of file diff --git a/docs/mdx/should-you-use-gloo.mdx b/docs/mdx/should-you-use-gloo.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/troubleshooting.mdx b/docs/mdx/troubleshooting.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/what-is-gloo.mdx b/docs/mdx/what-is-gloo.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mdx/why-gloo.mdx b/docs/mdx/why-gloo.mdx new file mode 100644 index 000000000..e69de29bb diff --git a/docs/mint.json 
b/docs/mint.json new file mode 100644 index 000000000..d6c1e4359 --- /dev/null +++ b/docs/mint.json @@ -0,0 +1,108 @@ +{ + "$schema": "https://mintlify.com/schema.json", + "name": "Gloo", + "logo": { + "light": "/logo/favicon.png", + "dark": "/logo/favicon.png" + }, + "favicon": "/favicon.png", + "colors": { + "primary": "#8b5cf6", + "light": "#a5b4fc", + "dark": "#020617", + "background": { + "dark": "#020617" + }, + "anchors": { + "from": "#FF7F57", + "to": "#a5b4fc" + } + }, + "topbarLinks": [ + { + "name": "Support", + "url": "mailto:contact@trygloo.com" + } + ], + "topbarCtaButton": { + "name": "Schedule call", + "url": "https://calendly.com/gloo-vai/30min" + }, + "tabs": [], + "navigation": [ + { + "group": "Get Started", + "pages": [ + "mdx/overview", + "mdx/installation", + "mdx/quickstart", + "mdx/running-tests" + ] + }, + { + "group": "Concepts", + "pages": [ + "mdx/what-is-gloo", + "mdx/why-gloo", + "mdx/should-you-use-gloo" + ] + }, + { + "group": "Guides", + "pages": [ + { + "group": "Basic", + "pages": [ + "mdx/examples/classification", + "mdx/examples/extraction", + "mdx/examples/translation", + "mdx/examples/summarization", + "mdx/examples/q-and-a" + ] + }, + { + "group": "Advanced", + "pages": ["mdx/examples/chat-bot"] + } + ] + }, + { + "group": "Reference", + "pages": [ + { + "group": "Gloo Config Syntax", + "pages": [ + { + "group": "Types", + "pages": ["mdx/ref/type", "mdx/ref/enum", "mdx/ref/class"] + }, + { + "group": "Functions", + "pages": [ + "mdx/ref/function", + "mdx/ref/test-group", + { + "group": "@variant", + "pages": ["mdx/ref/variant-llm", "mdx/ref/variant-code"] + } + ] + }, + { + "group": "clients", + "pages": ["mdx/ref/client-llm", "mdx/ref/client-code"] + }, + "mdx/ref/method" + ] + } + ] + } + ], + "footerSocials": { + "twitter": "https://twitter.com/trygloo", + "github": "https://github.com/GlooHQ" + }, + "modeToggle": { + "default": "light", + "isHidden": false + } +} diff --git a/docs/node_modules/.yarn-integrity 
b/docs/node_modules/.yarn-integrity new file mode 100644 index 000000000..9937b9e17 --- /dev/null +++ b/docs/node_modules/.yarn-integrity @@ -0,0 +1,10 @@ +{ + "systemParams": "darwin-arm64-93", + "modulesFolders": [], + "flags": [], + "linkedModules": [], + "topLevelPatterns": [], + "lockfileEntries": {}, + "files": [], + "artifacts": {} +} \ No newline at end of file diff --git a/docs/quickstart-old.mdx b/docs/quickstart-old.mdx new file mode 100644 index 000000000..573ff07e2 --- /dev/null +++ b/docs/quickstart-old.mdx @@ -0,0 +1,261 @@ +--- +title: "Quickstart" +description: Start building with Gloo in 5 minutes +--- + +Gloo allows developers to build, test and deploy powerful LLM apps using a task-based (aka "function-based") architecture. + +In order for any chatbot or LLM-powered app to interface with your existing code, you need to be able to mold its output into a structured format your program can understand. + + + An AI agent that uses "tools" is really just performing a classification task. + The input is a question, and the output is a list of tools. This is just one + example. Gloo allows you to define tasks like these in very simple steps, and + in the future, use the LLM-generated data to train specialized models. + + +Gloo consists of the following parts: + +1. **Gloo CLI** +2. **Gloo Task Definitions** +3. **Gloo Dashboard + Data** warehouse to view tests, or query your data to train your own models. + +## Building a semantic search task + +In this guide we will walk through how you can build a semantic search task using Gloo. + +#### Prerequisites: + +1. Install [Node + NPM](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) +1. Install npx: `npm install -g npx`, which can run the Gloo CLI without installing it globally +1. Run `npx @gloo-ai/client --version`, and if you see the version you're now ready to go. 
+ +### Step 1: Add the task to a tasks.yaml file + +The `tasks.yaml` is a definition of all, or a subset of tasks you want your program to perform using generative models. You can think of each task as the equivalent of an LLM prompt with a specific instruction and an output schema. + +This file can have many different tasks (e.g. summarize, translate, classify), that you can run in sequence (or in parallel) to make an LLM "pipeline" (aka an AI agent). + +```yaml project/tasks/tasks.yaml +tasks: + SearchDocuments: + input: SearchInput + output: SearchOutput + # You could add more tasks here + # Summarize... + # input: .... + +types: + ## Search + SearchInput: + query: string + context: string + SearchOutput: + clues: list + reasoning: string + answer: string +``` + +Each input and output maps to a `type` defined in the `types` section. + +### Generate your Task Definitions + +Next we will use the **Gloo CLI** to generate your task definitions with this command: + +```bash +npx @gloo-ai/client generate python --yaml tasks/tasks.yaml +``` + +This will generate a few files in your project: + +```bash +tasks/ + task_search_documents/ + v0/ + generated.py + task.py + test.py # Coming soon +``` + + + You must run this CLI anytime you change the tasks.yaml file. The CLI will + automatically create a new version of your task.py if it detects your new yaml + is incompatible with the existing task.py. + + +### Edit your generated Task Definition + +Now we can edit the generated `task.py`. At a glance, you will see a lot of commented out functions. +Below we go over each of these functions and what they do. + +We will leverage the input variables defined in `task.yaml` schema to build our prompt, and we will ensure the output instructions can output the schema we desire. 
+ +Recall that for our **Search** task the desired output schema is: + +```yaml +SearchOutput: + clues: list + reasoning: string + answer: string +``` + +Which translates to this JSON: + +```json +{ + "clues": string[], + "reasoning": string, + "answer": string, +} +``` + +We now edit the prompt so our LLM actually spits out that output format. + +```python task.py + class search(GlooLLMTaskInterface): + # ... + + def edit_llm_client(self) -> LLMClient: + return OpenAILLMClient(model_name="gpt-3.5-turbo", temperature=0) + + def edit_prompt_template(self) -> str: + return f""" + Answer the question given using the following context: + + Context: {VARS.in_SearchInput.context} + Question: {VARS.in_SearchInput.query} + + Respond in the following format: + { + "clues": string[], + "reasoning": string, + "answer": string, + } + + JSON: + """ + # ... + + async def run_v0(*, input: SearchInputModel) -> SearchOutputModel: + task = search() + return await task.run(input) +``` + +Note how we were able to refer to variables defined in the `task.yaml` **SearchInput** fields. These are all automatically generated, and as you guessed it, the output format can also be generated using your generated **SearchOutput** schema fields. This will generate the same text as above: + +```string +... +Output in the following format: + +{ + "{VARS.out_SearchOutput.clues}": string[], + "{VARS.out_SearchOutput.reasoning}": string, + "{VARS.out_SearchOutput.answer}": string, +} +... +``` + +All of these variable substitutions are tracked by Gloo and can be tracked individually in the dashboard. Gloo will let you use this data to dive deep into potential issues caused by specific sections of your prompt. I.e. you could narrow down which part of your prompt causes the most impact in generating good or bad responses. +-- (dashboard link) -- + + + +In general, view the `tasks.yaml` as a definition of _what_ tasks are, and the generated `task.py` files as definitions for _how_ they are executed. 
In the case of LLM apps, the "how" is done using prompt engineering and a combination of different LLM model parameters like temperature, etc. You can see both are defined in the next example. + + + +### Run the task + +```Python +from tasks.task_search_documents import run_search_documents +import asyncio + +def run_search() + context = perform_semantic_search(query) + input = SearchInputModel( + query="What is the capital of France?", + context={context}, + ) + # Note you can run synchronous version by importing run_search_documents_sync instead. + output = asyncio.run(run_search_documents(input=input)) + print(output) +``` + +### Advanced prompt engineering + +Recall the simple schema we have: + +``` + { + "clues": string[], + ... + } +``` + +Using `string[]` may not be the best description for `clues`. For example, you may want to indicate what kind of clues you want the model to actually output. Maybe you only want sentiment-based clues, or entities, or facts. We need something more than just declaring the type of `clues`. To do this, there a a couple of ways. + +#### 1. Add a "clues" instruction using a prompt template variable + +Instructions can be provided at the top of the prompt. Depending on the LLM model, performance may change by adding these at the beginning, rather than within the schema itself. + +To experiment with this... -TODO- + +#### 2. Use the Output -> JSON converter to define a custom description. + +Gloo comes with a built-in function called `edit_types_json_converter` that allows you to define the output format "description" and use it in your prompt. + +We will change the clues property in these ways: + +1. Change the name to "hints" in the prompt +2. Add more information about what type of clues to retrieve. + +```python task.py +class search(...) 
+ def edit_types_json_converter(self) -> search__Description: + return { + "SearchOutput": { + "clues": { + "name": "hints", + "description": "string[], where each clue is an entity that will help answer the question in a factual way", + }, + ... other fields search output ... + }, + } + + def edit_prompt_template() + return f''' + Answer the question given using the following context: + + Context: {VARS.in_SearchInput.context} + Question: {VARS.in_SearchInput.query} + + Respond in the following format: + { + "{VARS.out_SearchOutput.clues}": {VARS.out_SearchOutput.clues.desc}, + "reasoning": string, + "answer": string, + } + + JSON: + ''' +``` + +Note we refer to the description using the variable: `{VARS.out_SearchOutput.clues.desc}`. Gloo gives you a reference to all your input and output variables, as well as their descriptions. + +Now that we've finished the edit, our prompt will now have the custom description and name. + +```string +... +{ + "hints": string[], where each clue is an entity that will help answer the question in a factual way, + "reasoning": string, + "answer": string, +} +... +``` + +Even though this property is called "hints" in the prompt, the actual output of this task will contain a "clues" property (what was defined in our `tasks.yaml`). This allows you to prompt engineer more easily without having to change all your variable names all the time. + +## Testing + +(TODO) diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx new file mode 100644 index 000000000..a3ac37243 --- /dev/null +++ b/docs/quickstart.mdx @@ -0,0 +1,196 @@ +## What is Gloo? + +Gloo is a toolkit that allows you to build and monitor LLM (or other ML model) pipelines using a **function-based** architecture, where every call to a model has a defined input and output schema. Gloo uses a unique configuration file (GlooConfig) define your LLM functions in just a few lines. 
+ +If you’re building a chatbot, or any LLM application Gloo is a good way to decompose your task into specific observable tasks (classification, entity extraction, etc). + + + +Gloo consists of the following parts: + +1. **Gloo Config -** Human-readable schema designed to declare your LLM functions and their inputs and outputs. In a more readable and maintanable way than yamls or plaintext files. +2. **Gloo Intellisense VSCode Extension -** Enables GlooLang intellisense in VSCode +3. **Gloo CLI -** Builds gloo-lang files into executable functions for Python (and soon Typescript). +4. **Gloo Dashboard -** A dashboard where you can see Gloo-generated telemetry, like function call failure rates, etc. + +### Example Schema + +Example Gloo Schema for a sentiment classifier using an LLM + + + +(See sections below to learn how it works) + +Gloo saves hundreds of lines of code in telemetry, serialization and deserialization logic. + +## Getting started + +### Pre-requirements + +Currently Gloo only supports Python, with the [Poetry](https://python-poetry.org/docs/) package manager. If you're using pip instead of poetry, reach out and we can help you set up. + +For updates on Typescript release, feel free to reach out at contact@trygloo.com + +### Create a Gloo Project in the dashboard + +Visit [beta.app.trygloo.com](https://beta.app.trygloo.com) and create a new project. + +#### Setup environment variables + +On the dashboard, click on the project you created and go to "Keys". Note down the project ID and create a new secret to use as the APP SECRET below. + +```bash +GLOO_APP_ID=proj_123... 
+GLOO_APP_SECRET=gloo:your-key-from-dashboard +OPENAI_API_KEY=your-open-ai-key +# temporarily redirect to beta (production deployment coming soon) +GLOO_BASE_URL=https://beta.app.trygloo.com/api +``` + +### Install the **Gloo CLI** + +``` +brew tap gloohq/gloo +brew install gloo +``` + +To update the CLI + +``` +brew update +brew upgrade gloo +``` + +### Install the **Gloo VSCode Extension** + +Search for gloo.gloo in the VSCode extension marketplace + +![Gloo Intellisense](/images/extension.png) + +### Initialize Gloo in your project + +``` +gloo init +``` + +![](/images/glooinit.png) + +### Folder structure + +``` +. +├── app +│ └── your application files +├── gloo +│ └── your gloo files live here +├── pyproject.toml +└── generated + └── translated, runnable python code from .gloo. + Don't edit these files. +``` + +## Gloo Config Deep dive (5 min read) + +### Functions + +Functions (and other unique types on GlooConfig) start with an **@** symbol. They are the building blocks of your LLM pipeline. + + + +### Variants + +Variants specify how functions are executed. Functions are the blueprint as to what type of data they accept and return, and variants are the implementation of those functions. + +The following is an **LLM Variant** that prompts the model to take a string and return a boolean. + + + +### Running a function + +To run a function in your code you have to import it from the generated folder. The generated folder is where the Gloo CLI translates your GlooLang files into runnable Python code. + + + The generated functions are async by default. You can use asyncio to make them + run synchronously{" "} + + + + +Press `control + space` on VSCode to get suggestions for all the functions you can run. + + + +### Testing + +Gloo makes it easy to add function tests by declaring a test_group like below: + + + +These get automatically compiled into Python code that you can run using pytest. 
+
+You can declare as many different test groups as you want for a function. Each test group runs all variants of the function that have been declared.
+
+E.g. if you have 3 versions of a prompt, running the test will run all 3 variants so you can compare results.
+
+Note the **curly syntax** on Gloo:
+
+- inline data does not require \{\}
+- multi-line strings or data requires \{\}
+
+To run the test execute `pytest -s -k MyTests` in your terminal.
+
+
+
+and view the results on the dashboard using the link
+
+
+
+#### Test assertions
+
+To assert a certain result from the output, declare a **@method** for your **@test_group** that takes in 2 special types: **InputType** and **OutputType**, which are the input and output types of your function. You can then use the **assert** keyword to assert a certain result.
+
+
+
+
+On GlooConfig, you can write in-line python code using the **@lang[py]** identifier under **@method** statements. This gets copied into the generated boilerplate code under `/generated`
+
+
+
+
+ We recommend writing the python functions in an actual python file and copying
+ them over. We are working on syntax highlighting and linting for these
+ statements.
+
+
+### Prompt engineering on Gloo
+
+#### Basic example
+
+The prompt can take a multi-line string inside a \{\} block. No need to escape common characters like quotes. If you want to escape a curly brace, use \{\{ and \}\}.
+
+The prompt can leverage two special variables, the **@input** and **@output**.
+
+The **@input** allows you to inject your input into the prompt. If your input is an object, you can access its properties using dot notation, such as **\{@input.text\}**.
+
+The **@output** allows you to inject the output schema using **\{@output.json\}**
+
+See the below Example for a prompt defined inside an @variant\[llm\] for a function.
+
+
+
+and the end result
+
+
+
+#### Modifying the output schema descriptions
+
+LLMs can use descriptions to figure out what to inject into each field in your output schema. In GlooConfig we can accomplish
+this by using the @stringify keyword, which only works on the Function's @output types.
diff --git a/docs/yarn.lock b/docs/yarn.lock
new file mode 100644
index 000000000..fb57ccd13
--- /dev/null
+++ b/docs/yarn.lock
@@ -0,0 +1,4 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
diff --git a/release/.env.sample b/release/.env.sample
new file mode 100644
index 000000000..856edc876
--- /dev/null
+++ b/release/.env.sample
@@ -0,0 +1 @@
+VSCODE_RELEASE_TOKEN="your-vscode-marketplace-release-token"
diff --git a/release/extension.sh b/release/extension.sh
new file mode 100644
index 000000000..e69de29bb
diff --git a/release/package.sh b/release/package.sh
new file mode 100755
index 000000000..3940c96b0
--- /dev/null
+++ b/release/package.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+set -e
+set -x
+# cd into target/release from the current dir
+cargo build --release
+cd ./target/release
+tar -czvf gloo.tar.gz gloo
+echo $(shasum -a 256 "gloo.tar.gz")
\ No newline at end of file
diff --git a/root.code-workspace b/root.code-workspace
new file mode 100644
index 000000000..36a5f17be
--- /dev/null
+++ b/root.code-workspace
@@ -0,0 +1,50 @@
+{
+  "folders": [
+    {
+      "name": "Rust CLI",
+      "path": "cli"
+    },
+    {
+      "name": "VSCode Extension",
+      "path": "vscode-ext"
+    },
+    {
+      "name": "Python Client",
+      "path": "clients/python"
+    },
+    {
+      "name": "Docs",
+      "path": "docs"
+    },
+    {
+      "name": "Release Tools",
+      "path": "release"
+    },
+    {
+      "name": "test1/python",
+      "path": "client-tests/test1/python"
+    },
+    {
+      "path": ".github"
+    }
+  ],
+  "settings": {
+    "editor.formatOnSave": true,
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "[rust]": {
+      "editor.defaultFormatter": "rust-lang.rust-analyzer"
+    },
+    "[python]": {
"editor.defaultFormatter": "ms-python.black-formatter" + }, + "[cpp]": { + "editor.defaultFormatter": "xaver.clang-format" + }, + "mypy.enabled": false, + "gloo.path": "${workspaceFolder}/cli/target/debug", + "editor.tabSize": 2 + }, + "extensions": { + "recommendations": ["xaver.clang-format"] + } +} diff --git a/vscode-ext/.eslintrc.json b/vscode-ext/.eslintrc.json new file mode 100644 index 000000000..5dfecab7e --- /dev/null +++ b/vscode-ext/.eslintrc.json @@ -0,0 +1,18 @@ +{ + "root": true, + "parser": "@typescript-eslint/parser", + "parserOptions": { + "ecmaVersion": 6, + "sourceType": "module" + }, + "plugins": ["@typescript-eslint"], + "rules": { + "@typescript-eslint/naming-convention": "warn", + "@typescript-eslint/semi": "warn", + "curly": "warn", + "eqeqeq": "warn", + "no-throw-literal": "warn", + "semi": "off" + }, + "ignorePatterns": ["out", "dist", "**/*.d.ts"] +} diff --git a/vscode-ext/.gitignore b/vscode-ext/.gitignore new file mode 100644 index 000000000..5f8d342ae --- /dev/null +++ b/vscode-ext/.gitignore @@ -0,0 +1,6 @@ +out +dist +node_modules +.vscode-test/ +*.vsix +.DS_Store diff --git a/vscode-ext/.vscodeignore b/vscode-ext/.vscodeignore new file mode 100644 index 000000000..389996760 --- /dev/null +++ b/vscode-ext/.vscodeignore @@ -0,0 +1,10 @@ +.vscode/** +.vscode-test/** +src/** +.gitignore +.yarnrc +vsc-extension-quickstart.md +**/tsconfig.json +**/.eslintrc.json +**/*.map +**/*.ts diff --git a/vscode-ext/LICENSE b/vscode-ext/LICENSE new file mode 100644 index 000000000..e69de29bb diff --git a/vscode-ext/README.md b/vscode-ext/README.md new file mode 100644 index 000000000..50eb4665f --- /dev/null +++ b/vscode-ext/README.md @@ -0,0 +1,35 @@ +# Gloo Language VS Code Extension + +This VS Code extension provides support for the Gloo language used to define function-based LLM pipelines. + +### General features + +1. **Auto-build on Save**: Anytime a `.gloo` file is saved, the build script is automatically triggered. +2. 
**Syntax Highlighting**: Provides enhanced readability and coding experience by highlighting the Gloo language syntax for any file with the `.gloo` extension. + +## Prerequisites + +Gloo tooling is currently only available on macOS. + +First, install the **gloo CLI**: + +1. `brew tap gloohq/gloo` +2. `brew install gloo` + +To update the CLI + +1. `brew update` +2. `brew upgrade gloo` + +## Usage + +Initialize gloo in your Python project at the project root (and ensure you are using Poetry): + +`gloo init` + +**Auto-build on Save**: +When you save your `.gloo` files (`Ctrl+S`), the build script (`gloo build`) will automatically run. + +--- + +For any issues, feature requests, or contributions, please reach out at contact@trygloo.com diff --git a/vscode-ext/language-configuration.json b/vscode-ext/language-configuration.json new file mode 100644 index 000000000..f9fd39b07 --- /dev/null +++ b/vscode-ext/language-configuration.json @@ -0,0 +1,24 @@ +{ + "comments": { + // symbol used for single line comment. Remove this entry if your language does not support line comments + "lineComment": "//", + // symbols used for start and end a block comment. 
Remove this entry if your language does not support block comments + "blockComment": ["/*", "*/"] + }, + // symbols that are auto closed when typing + "autoClosingPairs": [ + ["{", "}"], + ["[", "]"], + ["(", ")"], + ["\"", "\""], + ["'", "'"] + ], + // symbols that can be used to surround a selection + "surroundingPairs": [ + ["{", "}"], + ["[", "]"], + ["(", ")"], + ["\"", "\""], + ["'", "'"] + ] +} diff --git a/vscode-ext/logo.png b/vscode-ext/logo.png new file mode 100644 index 000000000..4827c5f72 Binary files /dev/null and b/vscode-ext/logo.png differ diff --git a/vscode-ext/package-lock.json b/vscode-ext/package-lock.json new file mode 100644 index 000000000..77f840c9e --- /dev/null +++ b/vscode-ext/package-lock.json @@ -0,0 +1,3085 @@ +{ + "name": "gloo", + "version": "0.3.5", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "gloo", + "version": "0.3.5", + "devDependencies": { + "@types/glob": "^8.1.0", + "@types/mocha": "^10.0.1", + "@types/node": "^16.18.46", + "@types/vscode": "^1.73.0", + "@typescript-eslint/eslint-plugin": "^5.42.0", + "@typescript-eslint/parser": "^5.42.0", + "@vscode/test-electron": "^2.3.4", + "eslint": "^8.26.0", + "typescript": "^5.1.3" + }, + "engines": { + "vscode": "^1.74.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.3.tgz", + "integrity": "sha512-uj3pT6Mg+3t39fvLrj8iuCIJ38zKO9FpGtJ4BBJebJhEwjoT+KLVNCcHT5QC9NGRIEi7fZ0ZR8YRb884auB4Lg==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.4.0", + "globals": "^13.15.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.7", + 
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.7.tgz", + "integrity": "sha512-kBbPWzN8oVMLb0hOUYXhmxggL/1cJE6ydvjDIGi9EnAGUyA7cLVKQg+d/Dsm+KZwx2czGHrCmMVLiyg8s5JPKw==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "dev": true + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": 
"sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==", + "dev": true, + "dependencies": { + "@types/minimatch": "^5.1.2", + "@types/node": "*" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true + }, + "node_modules/@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, + "node_modules/@types/mocha": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.1.tgz", + "integrity": "sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==", + "dev": true + }, + "node_modules/@types/node": { + "version": "16.18.46", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.46.tgz", + "integrity": "sha512-Mnq3O9Xz52exs3mlxMcQuA7/9VFe/dXcrgAyfjLkABIqxXKOgBRjyazTxUbjsxDa4BP7hhPliyjVTP9RDP14xg==", + "dev": true + }, + 
"node_modules/@types/semver": { + "version": "7.3.13", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz", + "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", + "dev": true + }, + "node_modules/@types/vscode": { + "version": "1.73.1", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.73.1.tgz", + "integrity": "sha512-eArfOrAoZVV+Ao9zQOCaFNaeXj4kTCD+bGS2gyNgIFZH9xVMuLMlRrEkhb22NyxycFWKV1UyTh03vhaVHmqVMg==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.42.0.tgz", + "integrity": "sha512-5TJh2AgL6+wpL8H/GTSjNb4WrjKoR2rqvFxR/DDTqYNk6uXn8BJMEcncLSpMbf/XV1aS0jAjYwn98uvVCiAywQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "5.42.0", + "@typescript-eslint/type-utils": "5.42.0", + "@typescript-eslint/utils": "5.42.0", + "debug": "^4.3.4", + "ignore": "^5.2.0", + "natural-compare-lite": "^1.4.0", + "regexpp": "^3.2.0", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^5.0.0", + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.42.0.tgz", + "integrity": "sha512-Ixh9qrOTDRctFg3yIwrLkgf33AHyEIn6lhyf5cCfwwiGtkWhNpVKlEZApi3inGQR/barWnY7qY8FbGKBO7p3JA==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "5.42.0", + "@typescript-eslint/types": "5.42.0", + "@typescript-eslint/typescript-estree": "5.42.0", + "debug": "^4.3.4" + }, + "engines": { + 
"node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.42.0.tgz", + "integrity": "sha512-l5/3IBHLH0Bv04y+H+zlcLiEMEMjWGaCX6WyHE5Uk2YkSGAMlgdUPsT/ywTSKgu9D1dmmKMYgYZijObfA39Wow==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.42.0", + "@typescript-eslint/visitor-keys": "5.42.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.42.0.tgz", + "integrity": "sha512-HW14TXC45dFVZxnVW8rnUGnvYyRC0E/vxXShFCthcC9VhVTmjqOmtqj6H5rm9Zxv+ORxKA/1aLGD7vmlLsdlOg==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "5.42.0", + "@typescript-eslint/utils": "5.42.0", + "debug": "^4.3.4", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.42.0.tgz", + "integrity": "sha512-t4lzO9ZOAUcHY6bXQYRuu+3SSYdD9TS8ooApZft4WARt4/f2Cj/YpvbTe8A4GuhT4bNW72goDMOy7SW71mZwGw==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.42.0.tgz", + "integrity": "sha512-2O3vSq794x3kZGtV7i4SCWZWCwjEtkWfVqX4m5fbUBomOsEOyd6OAD1qU2lbvV5S8tgy/luJnOYluNyYVeOTTg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.42.0", + "@typescript-eslint/visitor-keys": "5.42.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.42.0.tgz", + "integrity": "sha512-JZ++3+h1vbeG1NUECXQZE3hg0kias9kOtcQr3+JVQ3whnjvKuMyktJAAIj6743OeNPnGBmjj7KEmiDL7qsdnCQ==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.9", + "@types/semver": "^7.3.12", + "@typescript-eslint/scope-manager": "5.42.0", + "@typescript-eslint/types": "5.42.0", + "@typescript-eslint/typescript-estree": "5.42.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^3.0.0", + "semver": "^7.3.7" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.42.0.tgz", + "integrity": 
"sha512-QHbu5Hf/2lOEOwy+IUw0GoSCuAzByTAWWrOTKzTzsotiUnWFpuKnXcAhC9YztAf2EElQ0VvIK+pHJUPkM0q7jg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.42.0", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@vscode/test-electron": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.4.tgz", + "integrity": "sha512-eWzIqXMhvlcoXfEFNWrVu/yYT5w6De+WZXR/bafUQhAp8+8GkQo95Oe14phwiRUPv8L+geAKl/QM2+PoT3YW3g==", + "dev": true, + "dependencies": { + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "jszip": "^3.10.1", + "semver": "^7.5.2" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/acorn": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz", + "integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": 
"sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + 
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, 
+ "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.26.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.26.0.tgz", + "integrity": "sha512-kzJkpaw1Bfwheq4VXUezFriD1GxszX6dUekM7Z3aC2o4hju+tsR/XyTC3RcoSD7jmy9VkPU3+N6YjVU2e96Oyg==", + "dev": true, + "dependencies": { + "@eslint/eslintrc": "^1.3.3", + "@humanwhocodes/config-array": "^0.11.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.4.0", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.15.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + 
"strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^2.0.0" + }, + "engines": { + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" + } + }, + "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", + "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/eslint/node_modules/eslint-scope": { + "version": "7.1.1", + "resolved": 
"https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/eslint/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/espree": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.0.tgz", + "integrity": "sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw==", + "dev": true, + "dependencies": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esquery/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": 
"sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, + "node_modules/fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": 
"sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dev": true, + "dependencies": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", + "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", + "dev": true + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + 
"node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "13.17.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", + "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "dependencies": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent": { 
+ "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "dev": true + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": 
"sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "node_modules/js-sdsl": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.1.5.tgz", + "integrity": "sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q==", 
+ "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, + "node_modules/jszip": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", + "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", + "dev": true, + "dependencies": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "setimmediate": "^1.0.5" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lie": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "dev": true, + "dependencies": { + "immediate": "~3.0.5" + } + }, + 
"node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": 
"^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, + "node_modules/natural-compare-lite": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", + "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", + "dev": true + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": 
"sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/readable-stream": { + "version": "2.3.8", + 
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": 
"sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "dev": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + 
} + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + 
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dev": true, + "dependencies": { + "tslib": "^1.8.1" + }, + "engines": { + "node": ">= 6" + }, + "peerDependencies": { + "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.3.tgz", + "integrity": 
"sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": 
"https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + }, + "dependencies": { + "@eslint/eslintrc": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.3.tgz", + "integrity": "sha512-uj3pT6Mg+3t39fvLrj8iuCIJ38zKO9FpGtJ4BBJebJhEwjoT+KLVNCcHT5QC9NGRIEi7fZ0ZR8YRb884auB4Lg==", + "dev": true, + "requires": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.4.0", + "globals": "^13.15.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + } + }, + "@humanwhocodes/config-array": { + "version": "0.11.7", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.7.tgz", + "integrity": "sha512-kBbPWzN8oVMLb0hOUYXhmxggL/1cJE6ydvjDIGi9EnAGUyA7cLVKQg+d/Dsm+KZwx2czGHrCmMVLiyg8s5JPKw==", + "dev": true, + "requires": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + } + }, + "@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true + }, + "@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "dev": true + }, + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": 
"sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } + }, + "@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true + }, + "@types/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==", + "dev": true, + "requires": { + "@types/minimatch": "^5.1.2", + "@types/node": "*" + } + }, + "@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true + }, + "@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, + "@types/mocha": { + "version": "10.0.1", + "resolved": 
"https://registry.npmjs.org/@types/mocha/-/mocha-10.0.1.tgz", + "integrity": "sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==", + "dev": true + }, + "@types/node": { + "version": "16.18.46", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.46.tgz", + "integrity": "sha512-Mnq3O9Xz52exs3mlxMcQuA7/9VFe/dXcrgAyfjLkABIqxXKOgBRjyazTxUbjsxDa4BP7hhPliyjVTP9RDP14xg==", + "dev": true + }, + "@types/semver": { + "version": "7.3.13", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz", + "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", + "dev": true + }, + "@types/vscode": { + "version": "1.73.1", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.73.1.tgz", + "integrity": "sha512-eArfOrAoZVV+Ao9zQOCaFNaeXj4kTCD+bGS2gyNgIFZH9xVMuLMlRrEkhb22NyxycFWKV1UyTh03vhaVHmqVMg==", + "dev": true + }, + "@typescript-eslint/eslint-plugin": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.42.0.tgz", + "integrity": "sha512-5TJh2AgL6+wpL8H/GTSjNb4WrjKoR2rqvFxR/DDTqYNk6uXn8BJMEcncLSpMbf/XV1aS0jAjYwn98uvVCiAywQ==", + "dev": true, + "requires": { + "@typescript-eslint/scope-manager": "5.42.0", + "@typescript-eslint/type-utils": "5.42.0", + "@typescript-eslint/utils": "5.42.0", + "debug": "^4.3.4", + "ignore": "^5.2.0", + "natural-compare-lite": "^1.4.0", + "regexpp": "^3.2.0", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/parser": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.42.0.tgz", + "integrity": "sha512-Ixh9qrOTDRctFg3yIwrLkgf33AHyEIn6lhyf5cCfwwiGtkWhNpVKlEZApi3inGQR/barWnY7qY8FbGKBO7p3JA==", + "dev": true, + "requires": { + "@typescript-eslint/scope-manager": "5.42.0", + "@typescript-eslint/types": "5.42.0", + "@typescript-eslint/typescript-estree": "5.42.0", + 
"debug": "^4.3.4" + } + }, + "@typescript-eslint/scope-manager": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.42.0.tgz", + "integrity": "sha512-l5/3IBHLH0Bv04y+H+zlcLiEMEMjWGaCX6WyHE5Uk2YkSGAMlgdUPsT/ywTSKgu9D1dmmKMYgYZijObfA39Wow==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.42.0", + "@typescript-eslint/visitor-keys": "5.42.0" + } + }, + "@typescript-eslint/type-utils": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.42.0.tgz", + "integrity": "sha512-HW14TXC45dFVZxnVW8rnUGnvYyRC0E/vxXShFCthcC9VhVTmjqOmtqj6H5rm9Zxv+ORxKA/1aLGD7vmlLsdlOg==", + "dev": true, + "requires": { + "@typescript-eslint/typescript-estree": "5.42.0", + "@typescript-eslint/utils": "5.42.0", + "debug": "^4.3.4", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/types": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.42.0.tgz", + "integrity": "sha512-t4lzO9ZOAUcHY6bXQYRuu+3SSYdD9TS8ooApZft4WARt4/f2Cj/YpvbTe8A4GuhT4bNW72goDMOy7SW71mZwGw==", + "dev": true + }, + "@typescript-eslint/typescript-estree": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.42.0.tgz", + "integrity": "sha512-2O3vSq794x3kZGtV7i4SCWZWCwjEtkWfVqX4m5fbUBomOsEOyd6OAD1qU2lbvV5S8tgy/luJnOYluNyYVeOTTg==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.42.0", + "@typescript-eslint/visitor-keys": "5.42.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/utils": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.42.0.tgz", + "integrity": "sha512-JZ++3+h1vbeG1NUECXQZE3hg0kias9kOtcQr3+JVQ3whnjvKuMyktJAAIj6743OeNPnGBmjj7KEmiDL7qsdnCQ==", + "dev": true, + "requires": { + 
"@types/json-schema": "^7.0.9", + "@types/semver": "^7.3.12", + "@typescript-eslint/scope-manager": "5.42.0", + "@typescript-eslint/types": "5.42.0", + "@typescript-eslint/typescript-estree": "5.42.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^3.0.0", + "semver": "^7.3.7" + } + }, + "@typescript-eslint/visitor-keys": { + "version": "5.42.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.42.0.tgz", + "integrity": "sha512-QHbu5Hf/2lOEOwy+IUw0GoSCuAzByTAWWrOTKzTzsotiUnWFpuKnXcAhC9YztAf2EElQ0VvIK+pHJUPkM0q7jg==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.42.0", + "eslint-visitor-keys": "^3.3.0" + } + }, + "@vscode/test-electron": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.4.tgz", + "integrity": "sha512-eWzIqXMhvlcoXfEFNWrVu/yYT5w6De+WZXR/bafUQhAp8+8GkQo95Oe14phwiRUPv8L+geAKl/QM2+PoT3YW3g==", + "dev": true, + "requires": { + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "jszip": "^3.10.1", + "semver": "^7.5.2" + } + }, + "acorn": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz", + "integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==", + "dev": true + }, + "acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "requires": {} + }, + "agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "requires": { + "debug": "4" + } + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": 
"sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": 
"sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "requires": { + "path-type": "^4.0.0" + } + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "eslint": { + "version": "8.26.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.26.0.tgz", + "integrity": "sha512-kzJkpaw1Bfwheq4VXUezFriD1GxszX6dUekM7Z3aC2o4hju+tsR/XyTC3RcoSD7jmy9VkPU3+N6YjVU2e96Oyg==", + "dev": true, + "requires": { + "@eslint/eslintrc": "^1.3.3", + "@humanwhocodes/config-array": "^0.11.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + 
"ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.4.0", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.15.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + }, + "dependencies": { + "eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + } + }, + "eslint-utils": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^2.0.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true + } + } + }, + "eslint-visitor-keys": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", + "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", + "dev": true + }, + "espree": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.0.tgz", + "integrity": "sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw==", + "dev": true, + "requires": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" + } + }, + "esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dev": true, + "requires": { + "estraverse": "^5.1.0" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "requires": { 
+ "estraverse": "^5.2.0" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "dependencies": { + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + } + } + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, + "fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dev": true, + "requires": { + "reusify": "^1.0.4" + } + }, + "file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "requires": { + "flat-cache": "^3.0.4" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dev": true, + "requires": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + } + }, + "flatted": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", + "integrity": 
"sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "requires": { + "is-glob": "^4.0.3" + } + }, + "globals": { + "version": "13.17.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", + "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" + } + }, + "globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "requires": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + } + }, + "grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, + "has-flag": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "requires": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + } + }, + "https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "dev": true + }, + "immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "dev": true + }, + "import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "js-sdsl": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.1.5.tgz", + "integrity": 
"sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q==", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, + "jszip": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", + "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", + "dev": true, + "requires": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "setimmediate": "^1.0.5" + } + }, + "levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "requires": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + } + }, + "lie": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "dev": true, + "requires": { + "immediate": "~3.0.5" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true + }, + "micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "requires": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "natural-compare": { + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, + "natural-compare-lite": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", + "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "requires": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + } + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true + }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + 
"integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "punycode": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + }, + "queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true + }, + "readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "dev": true + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "run-parallel": { + "version": 
"1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "requires": { + "queue-microtask": "^1.2.2" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "dev": true + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": 
"sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dev": true, + "requires": { + "tslib": "^1.8.1" + } + }, + "type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "requires": { + "prelude-ls": "^1.2.1" + } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true + }, + "typescript": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.3.tgz", + "integrity": "sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw==", + "dev": true + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": 
"sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } + } +} diff --git a/vscode-ext/package.json b/vscode-ext/package.json new file mode 100644 index 000000000..f1b7826ba --- /dev/null +++ b/vscode-ext/package.json @@ -0,0 +1,80 @@ +{ + "name": "gloo", + "displayName": "Gloo", + "description": "Gloo intellisense", + "version": "0.3.5", + "publisher": "Gloo", + "repository": "https://github.com/GlooHQ/gloo-lang", + "homepage": "https://trygloo.com", + "icon": "logo.png", + "engines": { + "vscode": "^1.74.0" + }, + "categories": [ + "Other" + ], + "activationEvents": [], + "main": "./out/extension.js", + "contributes": { + "configuration": { + "title": "Gloo extension settings", + "properties": { + "gloo.path": { + "type": "string", + "default": "gloo", + "description": "Override the gloo CLI path" + } + } + }, + "languages": [ + { + "id": "gloo", + "aliases": [ + "Gloo", + "gloo" + ], + "extensions": [ + ".gloo" + ], + "configuration": "./language-configuration.json" + } + ], + "grammars": [ + { + "language": "gloo", + "scopeName": "source.gloo", + "path": "./syntaxes/gloo.tmLanguage.json" + } + ], + "activationEvents": [ + "onLanguage:gloo" + ], + "snippets": [ + { + "language": "gloo", + "path": "./snippets/glooSnippets.json" + } + ] + }, + 
"scripts": { + "vscode:prepublish": "pnpm run compile", + "compile": "tsc -p ./", + "lint": "eslint \"src/**/*.ts\"", + "watch": "tsc -watch -p ./", + "build:dev": "pnpm version prerelease --preid=dev --no-git-tag-version && vsce package", + "build:beta": "pnpm version prerelease --preid=beta && vsce package && pnpm version patch", + "build:stable": "vsce package" + }, + "devDependencies": { + "@types/glob": "^8.1.0", + "@types/mocha": "^10.0.1", + "@types/node": "^16.18.46", + "@types/vscode": "^1.73.0", + "@typescript-eslint/eslint-plugin": "^5.42.0", + "@typescript-eslint/parser": "^5.42.0", + "@vscode/test-electron": "^2.3.4", + "@vscode/vsce": "^2.21.1", + "eslint": "^8.26.0", + "typescript": "^5.1.3" + } +} diff --git a/vscode-ext/pnpm-lock.yaml b/vscode-ext/pnpm-lock.yaml new file mode 100644 index 000000000..125147a25 --- /dev/null +++ b/vscode-ext/pnpm-lock.yaml @@ -0,0 +1,1719 @@ +lockfileVersion: '6.1' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +devDependencies: + '@types/glob': + specifier: ^8.1.0 + version: 8.1.0 + '@types/mocha': + specifier: ^10.0.1 + version: 10.0.1 + '@types/node': + specifier: ^16.18.46 + version: 16.18.46 + '@types/vscode': + specifier: ^1.73.0 + version: 1.73.0 + '@typescript-eslint/eslint-plugin': + specifier: ^5.42.0 + version: 5.42.0(@typescript-eslint/parser@5.42.0)(eslint@8.26.0)(typescript@5.1.3) + '@typescript-eslint/parser': + specifier: ^5.42.0 + version: 5.42.0(eslint@8.26.0)(typescript@5.1.3) + '@vscode/test-electron': + specifier: ^2.3.4 + version: 2.3.4 + '@vscode/vsce': + specifier: ^2.21.1 + version: 2.21.1 + eslint: + specifier: ^8.26.0 + version: 8.26.0 + typescript: + specifier: ^5.1.3 + version: 5.1.3 + +packages: + + /@aashutoshrathi/word-wrap@1.2.6: + resolution: {integrity: sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==} + engines: {node: '>=0.10.0'} + dev: true + + /@eslint/eslintrc@1.4.1: + resolution: {integrity: 
sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + ajv: 6.12.6 + debug: 4.3.4 + espree: 9.6.1 + globals: 13.23.0 + ignore: 5.2.4 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@humanwhocodes/config-array@0.11.11: + resolution: {integrity: sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==} + engines: {node: '>=10.10.0'} + dependencies: + '@humanwhocodes/object-schema': 1.2.1 + debug: 4.3.4 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@humanwhocodes/module-importer@1.0.1: + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + dev: true + + /@humanwhocodes/object-schema@1.2.1: + resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==} + dev: true + + /@nodelib/fs.scandir@2.1.5: + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + dev: true + + /@nodelib/fs.stat@2.0.5: + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + dev: true + + /@nodelib/fs.walk@1.2.8: + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.15.0 + dev: true + + /@tootallnate/once@1.1.2: + resolution: {integrity: sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==} + engines: {node: '>= 
6'} + dev: true + + /@types/glob@8.1.0: + resolution: {integrity: sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==} + dependencies: + '@types/minimatch': 5.1.2 + '@types/node': 16.18.46 + dev: true + + /@types/json-schema@7.0.13: + resolution: {integrity: sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==} + dev: true + + /@types/minimatch@5.1.2: + resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} + dev: true + + /@types/mocha@10.0.1: + resolution: {integrity: sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==} + dev: true + + /@types/node@16.18.46: + resolution: {integrity: sha512-Mnq3O9Xz52exs3mlxMcQuA7/9VFe/dXcrgAyfjLkABIqxXKOgBRjyazTxUbjsxDa4BP7hhPliyjVTP9RDP14xg==} + dev: true + + /@types/semver@7.5.3: + resolution: {integrity: sha512-OxepLK9EuNEIPxWNME+C6WwbRAOOI2o2BaQEGzz5Lu2e4Z5eDnEo+/aVEDMIXywoJitJ7xWd641wrGLZdtwRyw==} + dev: true + + /@types/vscode@1.73.0: + resolution: {integrity: sha512-FhkfF7V3fj7S3WqXu7AxFesBLO3uMkdCPJJPbwyZXezv2xJ6xBWHYM2CmkkbO8wT9Fr3KipwxGGOoQRrYq7mHg==} + dev: true + + /@typescript-eslint/eslint-plugin@5.42.0(@typescript-eslint/parser@5.42.0)(eslint@8.26.0)(typescript@5.1.3): + resolution: {integrity: sha512-5TJh2AgL6+wpL8H/GTSjNb4WrjKoR2rqvFxR/DDTqYNk6uXn8BJMEcncLSpMbf/XV1aS0jAjYwn98uvVCiAywQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + '@typescript-eslint/parser': ^5.0.0 + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/parser': 5.42.0(eslint@8.26.0)(typescript@5.1.3) + '@typescript-eslint/scope-manager': 5.42.0 + '@typescript-eslint/type-utils': 5.42.0(eslint@8.26.0)(typescript@5.1.3) + '@typescript-eslint/utils': 5.42.0(eslint@8.26.0)(typescript@5.1.3) + debug: 4.3.4 + eslint: 8.26.0 + ignore: 
5.2.4 + natural-compare-lite: 1.4.0 + regexpp: 3.2.0 + semver: 7.5.4 + tsutils: 3.21.0(typescript@5.1.3) + typescript: 5.1.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/parser@5.42.0(eslint@8.26.0)(typescript@5.1.3): + resolution: {integrity: sha512-Ixh9qrOTDRctFg3yIwrLkgf33AHyEIn6lhyf5cCfwwiGtkWhNpVKlEZApi3inGQR/barWnY7qY8FbGKBO7p3JA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/scope-manager': 5.42.0 + '@typescript-eslint/types': 5.42.0 + '@typescript-eslint/typescript-estree': 5.42.0(typescript@5.1.3) + debug: 4.3.4 + eslint: 8.26.0 + typescript: 5.1.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/scope-manager@5.42.0: + resolution: {integrity: sha512-l5/3IBHLH0Bv04y+H+zlcLiEMEMjWGaCX6WyHE5Uk2YkSGAMlgdUPsT/ywTSKgu9D1dmmKMYgYZijObfA39Wow==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + '@typescript-eslint/types': 5.42.0 + '@typescript-eslint/visitor-keys': 5.42.0 + dev: true + + /@typescript-eslint/type-utils@5.42.0(eslint@8.26.0)(typescript@5.1.3): + resolution: {integrity: sha512-HW14TXC45dFVZxnVW8rnUGnvYyRC0E/vxXShFCthcC9VhVTmjqOmtqj6H5rm9Zxv+ORxKA/1aLGD7vmlLsdlOg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: '*' + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/typescript-estree': 5.42.0(typescript@5.1.3) + '@typescript-eslint/utils': 5.42.0(eslint@8.26.0)(typescript@5.1.3) + debug: 4.3.4 + eslint: 8.26.0 + tsutils: 3.21.0(typescript@5.1.3) + typescript: 5.1.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/types@5.42.0: + resolution: {integrity: sha512-t4lzO9ZOAUcHY6bXQYRuu+3SSYdD9TS8ooApZft4WARt4/f2Cj/YpvbTe8A4GuhT4bNW72goDMOy7SW71mZwGw==} + 
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dev: true + + /@typescript-eslint/typescript-estree@5.42.0(typescript@5.1.3): + resolution: {integrity: sha512-2O3vSq794x3kZGtV7i4SCWZWCwjEtkWfVqX4m5fbUBomOsEOyd6OAD1qU2lbvV5S8tgy/luJnOYluNyYVeOTTg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/types': 5.42.0 + '@typescript-eslint/visitor-keys': 5.42.0 + debug: 4.3.4 + globby: 11.1.0 + is-glob: 4.0.3 + semver: 7.5.4 + tsutils: 3.21.0(typescript@5.1.3) + typescript: 5.1.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/utils@5.42.0(eslint@8.26.0)(typescript@5.1.3): + resolution: {integrity: sha512-JZ++3+h1vbeG1NUECXQZE3hg0kias9kOtcQr3+JVQ3whnjvKuMyktJAAIj6743OeNPnGBmjj7KEmiDL7qsdnCQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + '@types/json-schema': 7.0.13 + '@types/semver': 7.5.3 + '@typescript-eslint/scope-manager': 5.42.0 + '@typescript-eslint/types': 5.42.0 + '@typescript-eslint/typescript-estree': 5.42.0(typescript@5.1.3) + eslint: 8.26.0 + eslint-scope: 5.1.1 + eslint-utils: 3.0.0(eslint@8.26.0) + semver: 7.5.4 + transitivePeerDependencies: + - supports-color + - typescript + dev: true + + /@typescript-eslint/visitor-keys@5.42.0: + resolution: {integrity: sha512-QHbu5Hf/2lOEOwy+IUw0GoSCuAzByTAWWrOTKzTzsotiUnWFpuKnXcAhC9YztAf2EElQ0VvIK+pHJUPkM0q7jg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + '@typescript-eslint/types': 5.42.0 + eslint-visitor-keys: 3.4.3 + dev: true + + /@vscode/test-electron@2.3.4: + resolution: {integrity: sha512-eWzIqXMhvlcoXfEFNWrVu/yYT5w6De+WZXR/bafUQhAp8+8GkQo95Oe14phwiRUPv8L+geAKl/QM2+PoT3YW3g==} + engines: {node: '>=16'} + dependencies: + http-proxy-agent: 4.0.1 + https-proxy-agent: 5.0.1 + jszip: 3.10.1 + semver: 7.5.4 + transitivePeerDependencies: + 
- supports-color + dev: true + + /@vscode/vsce@2.21.1: + resolution: {integrity: sha512-f45/aT+HTubfCU2oC7IaWnH9NjOWp668ML002QiFObFRVUCoLtcwepp9mmql/ArFUy+HCHp54Xrq4koTcOD6TA==} + engines: {node: '>= 14'} + hasBin: true + dependencies: + azure-devops-node-api: 11.2.0 + chalk: 2.4.2 + cheerio: 1.0.0-rc.12 + commander: 6.2.1 + glob: 7.2.3 + hosted-git-info: 4.1.0 + jsonc-parser: 3.2.0 + leven: 3.1.0 + markdown-it: 12.3.2 + mime: 1.6.0 + minimatch: 3.1.2 + parse-semver: 1.1.1 + read: 1.0.7 + semver: 7.5.4 + tmp: 0.2.1 + typed-rest-client: 1.8.11 + url-join: 4.0.1 + xml2js: 0.5.0 + yauzl: 2.10.0 + yazl: 2.5.1 + optionalDependencies: + keytar: 7.9.0 + dev: true + + /acorn-jsx@5.3.2(acorn@8.10.0): + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + acorn: 8.10.0 + dev: true + + /acorn@8.10.0: + resolution: {integrity: sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==} + engines: {node: '>=0.4.0'} + hasBin: true + dev: true + + /agent-base@6.0.2: + resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + dependencies: + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /ajv@6.12.6: + resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + dev: true + + /ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + dev: true + + /ansi-styles@3.2.1: + resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} + 
engines: {node: '>=4'} + dependencies: + color-convert: 1.9.3 + dev: true + + /ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + dependencies: + color-convert: 2.0.1 + dev: true + + /argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + dev: true + + /array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + dev: true + + /azure-devops-node-api@11.2.0: + resolution: {integrity: sha512-XdiGPhrpaT5J8wdERRKs5g8E0Zy1pvOYTli7z9E8nmOn3YGp4FhtjhrOyFmX/8veWCwdI69mCHKJw6l+4J/bHA==} + dependencies: + tunnel: 0.0.6 + typed-rest-client: 1.8.11 + dev: true + + /balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + dev: true + + /base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + dev: true + optional: true + + /bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + dependencies: + buffer: 5.7.1 + inherits: 2.0.4 + readable-stream: 3.6.2 + dev: true + optional: true + + /boolbase@1.0.0: + resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + dev: true + + /brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + dev: true + + /braces@3.0.2: + resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + engines: {node: '>=8'} + 
dependencies: + fill-range: 7.0.1 + dev: true + + /buffer-crc32@0.2.13: + resolution: {integrity: sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==} + dev: true + + /buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + dev: true + optional: true + + /call-bind@1.0.2: + resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} + dependencies: + function-bind: 1.1.1 + get-intrinsic: 1.2.1 + dev: true + + /callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + dev: true + + /chalk@2.4.2: + resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} + engines: {node: '>=4'} + dependencies: + ansi-styles: 3.2.1 + escape-string-regexp: 1.0.5 + supports-color: 5.5.0 + dev: true + + /chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + dev: true + + /cheerio-select@2.1.0: + resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} + dependencies: + boolbase: 1.0.0 + css-select: 5.1.0 + css-what: 6.1.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.1.0 + dev: true + + /cheerio@1.0.0-rc.12: + resolution: {integrity: sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==} + engines: {node: '>= 6'} + dependencies: + cheerio-select: 2.1.0 + dom-serializer: 2.0.0 + domhandler: 5.0.3 + domutils: 3.1.0 + htmlparser2: 8.0.2 + parse5: 7.1.2 + parse5-htmlparser2-tree-adapter: 7.0.0 + dev: 
true + + /chownr@1.1.4: + resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} + dev: true + optional: true + + /color-convert@1.9.3: + resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} + dependencies: + color-name: 1.1.3 + dev: true + + /color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + dependencies: + color-name: 1.1.4 + dev: true + + /color-name@1.1.3: + resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} + dev: true + + /color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + dev: true + + /commander@6.2.1: + resolution: {integrity: sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==} + engines: {node: '>= 6'} + dev: true + + /concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + dev: true + + /core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + dev: true + + /cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + dev: true + + /css-select@5.1.0: + resolution: {integrity: sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==} + dependencies: + boolbase: 1.0.0 + css-what: 6.1.0 + domhandler: 5.0.3 + domutils: 3.1.0 + nth-check: 2.1.1 + dev: true + + /css-what@6.1.0: + resolution: {integrity: 
sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} + engines: {node: '>= 6'} + dev: true + + /debug@4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + dev: true + + /decompress-response@6.0.0: + resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} + engines: {node: '>=10'} + dependencies: + mimic-response: 3.1.0 + dev: true + optional: true + + /deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + dev: true + optional: true + + /deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + dev: true + + /detect-libc@2.0.2: + resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} + engines: {node: '>=8'} + dev: true + optional: true + + /dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + dependencies: + path-type: 4.0.0 + dev: true + + /doctrine@3.0.0: + resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} + engines: {node: '>=6.0.0'} + dependencies: + esutils: 2.0.3 + dev: true + + /dom-serializer@2.0.0: + resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + entities: 4.5.0 + dev: true + + /domelementtype@2.3.0: + resolution: {integrity: 
sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} + dev: true + + /domhandler@5.0.3: + resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} + engines: {node: '>= 4'} + dependencies: + domelementtype: 2.3.0 + dev: true + + /domutils@3.1.0: + resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==} + dependencies: + dom-serializer: 2.0.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + dev: true + + /end-of-stream@1.4.4: + resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} + dependencies: + once: 1.4.0 + dev: true + optional: true + + /entities@2.1.0: + resolution: {integrity: sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==} + dev: true + + /entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + dev: true + + /escape-string-regexp@1.0.5: + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + dev: true + + /escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + dev: true + + /eslint-scope@5.1.1: + resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} + engines: {node: '>=8.0.0'} + dependencies: + esrecurse: 4.3.0 + estraverse: 4.3.0 + dev: true + + /eslint-scope@7.2.2: + resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + esrecurse: 4.3.0 + 
estraverse: 5.3.0 + dev: true + + /eslint-utils@3.0.0(eslint@8.26.0): + resolution: {integrity: sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==} + engines: {node: ^10.0.0 || ^12.0.0 || >= 14.0.0} + peerDependencies: + eslint: '>=5' + dependencies: + eslint: 8.26.0 + eslint-visitor-keys: 2.1.0 + dev: true + + /eslint-visitor-keys@2.1.0: + resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==} + engines: {node: '>=10'} + dev: true + + /eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dev: true + + /eslint@8.26.0: + resolution: {integrity: sha512-kzJkpaw1Bfwheq4VXUezFriD1GxszX6dUekM7Z3aC2o4hju+tsR/XyTC3RcoSD7jmy9VkPU3+N6YjVU2e96Oyg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + hasBin: true + dependencies: + '@eslint/eslintrc': 1.4.1 + '@humanwhocodes/config-array': 0.11.11 + '@humanwhocodes/module-importer': 1.0.1 + '@nodelib/fs.walk': 1.2.8 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.3 + debug: 4.3.4 + doctrine: 3.0.0 + escape-string-regexp: 4.0.0 + eslint-scope: 7.2.2 + eslint-utils: 3.0.0(eslint@8.26.0) + eslint-visitor-keys: 3.4.3 + espree: 9.6.1 + esquery: 1.5.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 6.0.1 + find-up: 5.0.0 + glob-parent: 6.0.2 + globals: 13.23.0 + grapheme-splitter: 1.0.4 + ignore: 5.2.4 + import-fresh: 3.3.0 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + is-path-inside: 3.0.3 + js-sdsl: 4.4.2 + js-yaml: 4.1.0 + json-stable-stringify-without-jsonify: 1.0.1 + levn: 0.4.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.3 + regexpp: 3.2.0 + strip-ansi: 6.0.1 + strip-json-comments: 3.1.1 + text-table: 0.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /espree@9.6.1: + resolution: {integrity: 
sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + acorn: 8.10.0 + acorn-jsx: 5.3.2(acorn@8.10.0) + eslint-visitor-keys: 3.4.3 + dev: true + + /esquery@1.5.0: + resolution: {integrity: sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==} + engines: {node: '>=0.10'} + dependencies: + estraverse: 5.3.0 + dev: true + + /esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + dependencies: + estraverse: 5.3.0 + dev: true + + /estraverse@4.3.0: + resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} + engines: {node: '>=4.0'} + dev: true + + /estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + dev: true + + /esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + dev: true + + /expand-template@2.0.3: + resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} + engines: {node: '>=6'} + dev: true + optional: true + + /fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + dev: true + + /fast-glob@3.3.1: + resolution: {integrity: sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==} + engines: {node: '>=8.6.0'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + dev: true + + /fast-json-stable-stringify@2.1.0: + resolution: {integrity: 
sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + dev: true + + /fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + dev: true + + /fastq@1.15.0: + resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} + dependencies: + reusify: 1.0.4 + dev: true + + /fd-slicer@1.1.0: + resolution: {integrity: sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==} + dependencies: + pend: 1.2.0 + dev: true + + /file-entry-cache@6.0.1: + resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} + engines: {node: ^10.12.0 || >=12.0.0} + dependencies: + flat-cache: 3.1.0 + dev: true + + /fill-range@7.0.1: + resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} + engines: {node: '>=8'} + dependencies: + to-regex-range: 5.0.1 + dev: true + + /find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + dev: true + + /flat-cache@3.1.0: + resolution: {integrity: sha512-OHx4Qwrrt0E4jEIcI5/Xb+f+QmJYNj2rrK8wiIdQOIrB9WrrJL8cjZvXdXuBTkkEwEqLycb5BeZDV1o2i9bTew==} + engines: {node: '>=12.0.0'} + dependencies: + flatted: 3.2.9 + keyv: 4.5.3 + rimraf: 3.0.2 + dev: true + + /flatted@3.2.9: + resolution: {integrity: sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==} + dev: true + + /fs-constants@1.0.0: + resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + dev: true + optional: true + + /fs.realpath@1.0.0: + resolution: {integrity: 
sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + dev: true + + /function-bind@1.1.1: + resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + dev: true + + /get-intrinsic@1.2.1: + resolution: {integrity: sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==} + dependencies: + function-bind: 1.1.1 + has: 1.0.4 + has-proto: 1.0.1 + has-symbols: 1.0.3 + dev: true + + /github-from-package@0.0.0: + resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==} + dev: true + optional: true + + /glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + dependencies: + is-glob: 4.0.3 + dev: true + + /glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + dependencies: + is-glob: 4.0.3 + dev: true + + /glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: true + + /globals@13.23.0: + resolution: {integrity: sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==} + engines: {node: '>=8'} + dependencies: + type-fest: 0.20.2 + dev: true + + /globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.1 + ignore: 5.2.4 + merge2: 1.4.1 + slash: 3.0.0 + dev: true + + /grapheme-splitter@1.0.4: + resolution: 
{integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==} + dev: true + + /has-flag@3.0.0: + resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} + engines: {node: '>=4'} + dev: true + + /has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + dev: true + + /has-proto@1.0.1: + resolution: {integrity: sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==} + engines: {node: '>= 0.4'} + dev: true + + /has-symbols@1.0.3: + resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} + engines: {node: '>= 0.4'} + dev: true + + /has@1.0.4: + resolution: {integrity: sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ==} + engines: {node: '>= 0.4.0'} + dev: true + + /hosted-git-info@4.1.0: + resolution: {integrity: sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==} + engines: {node: '>=10'} + dependencies: + lru-cache: 6.0.0 + dev: true + + /htmlparser2@8.0.2: + resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==} + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.1.0 + entities: 4.5.0 + dev: true + + /http-proxy-agent@4.0.1: + resolution: {integrity: sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==} + engines: {node: '>= 6'} + dependencies: + '@tootallnate/once': 1.1.2 + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /https-proxy-agent@5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: 
{node: '>= 6'} + dependencies: + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + dev: true + optional: true + + /ignore@5.2.4: + resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==} + engines: {node: '>= 4'} + dev: true + + /immediate@3.0.6: + resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} + dev: true + + /import-fresh@3.3.0: + resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} + engines: {node: '>=6'} + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + dev: true + + /imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + dev: true + + /inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + dev: true + + /inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + dev: true + + /ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + dev: true + optional: true + + /is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + dev: true + + /is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + dependencies: + is-extglob: 2.1.1 + 
dev: true + + /is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + dev: true + + /is-path-inside@3.0.3: + resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} + engines: {node: '>=8'} + dev: true + + /isarray@1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + dev: true + + /isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + dev: true + + /js-sdsl@4.4.2: + resolution: {integrity: sha512-dwXFwByc/ajSV6m5bcKAPwe4yDDF6D614pxmIi5odytzxRlwqF6nwoiCek80Ixc7Cvma5awClxrzFtxCQvcM8w==} + dev: true + + /js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + dependencies: + argparse: 2.0.1 + dev: true + + /json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + dev: true + + /json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + dev: true + + /json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + dev: true + + /jsonc-parser@3.2.0: + resolution: {integrity: sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==} + dev: true + + /jszip@3.10.1: + resolution: {integrity: sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==} + dependencies: + lie: 3.3.0 + pako: 1.0.11 + readable-stream: 2.3.8 + setimmediate: 1.0.5 + dev: true + + /keytar@7.9.0: + 
resolution: {integrity: sha512-VPD8mtVtm5JNtA2AErl6Chp06JBfy7diFQ7TQQhdpWOl6MrCRB+eRbvAZUsbGQS9kiMq0coJsy0W0vHpDCkWsQ==} + requiresBuild: true + dependencies: + node-addon-api: 4.3.0 + prebuild-install: 7.1.1 + dev: true + optional: true + + /keyv@4.5.3: + resolution: {integrity: sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug==} + dependencies: + json-buffer: 3.0.1 + dev: true + + /leven@3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} + dev: true + + /levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + dev: true + + /lie@3.3.0: + resolution: {integrity: sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==} + dependencies: + immediate: 3.0.6 + dev: true + + /linkify-it@3.0.3: + resolution: {integrity: sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==} + dependencies: + uc.micro: 1.0.6 + dev: true + + /locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + dependencies: + p-locate: 5.0.0 + dev: true + + /lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + dev: true + + /lru-cache@6.0.0: + resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} + engines: {node: '>=10'} + dependencies: + yallist: 4.0.0 + dev: true + + /markdown-it@12.3.2: + resolution: {integrity: sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==} + hasBin: true + dependencies: + 
argparse: 2.0.1 + entities: 2.1.0 + linkify-it: 3.0.3 + mdurl: 1.0.1 + uc.micro: 1.0.6 + dev: true + + /mdurl@1.0.1: + resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==} + dev: true + + /merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + dev: true + + /micromatch@4.0.5: + resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} + engines: {node: '>=8.6'} + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + dev: true + + /mime@1.6.0: + resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} + engines: {node: '>=4'} + hasBin: true + dev: true + + /mimic-response@3.1.0: + resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} + engines: {node: '>=10'} + dev: true + optional: true + + /minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + dependencies: + brace-expansion: 1.1.11 + dev: true + + /minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + dev: true + optional: true + + /mkdirp-classic@0.5.3: + resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} + dev: true + optional: true + + /ms@2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true + + /mute-stream@0.0.8: + resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} + dev: true + + /napi-build-utils@1.0.2: + resolution: {integrity: 
sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==} + dev: true + optional: true + + /natural-compare-lite@1.4.0: + resolution: {integrity: sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==} + dev: true + + /natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + dev: true + + /node-abi@3.47.0: + resolution: {integrity: sha512-2s6B2CWZM//kPgwnuI0KrYwNjfdByE25zvAaEpq9IH4zcNsarH8Ihu/UuX6XMPEogDAxkuUFeZn60pXNHAqn3A==} + engines: {node: '>=10'} + dependencies: + semver: 7.5.4 + dev: true + optional: true + + /node-addon-api@4.3.0: + resolution: {integrity: sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==} + dev: true + optional: true + + /nth-check@2.1.1: + resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} + dependencies: + boolbase: 1.0.0 + dev: true + + /object-inspect@1.12.3: + resolution: {integrity: sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==} + dev: true + + /once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + dependencies: + wrappy: 1.0.2 + dev: true + + /optionator@0.9.3: + resolution: {integrity: sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==} + engines: {node: '>= 0.8.0'} + dependencies: + '@aashutoshrathi/word-wrap': 1.2.6 + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + dev: true + + /p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + dependencies: + yocto-queue: 0.1.0 + dev: true + + /p-locate@5.0.0: + resolution: 
{integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + dependencies: + p-limit: 3.1.0 + dev: true + + /pako@1.0.11: + resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==} + dev: true + + /parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + dependencies: + callsites: 3.1.0 + dev: true + + /parse-semver@1.1.1: + resolution: {integrity: sha512-Eg1OuNntBMH0ojvEKSrvDSnwLmvVuUOSdylH/pSCPNMIspLlweJyIWXCE+k/5hm3cj/EBUYwmWkjhBALNP4LXQ==} + dependencies: + semver: 5.7.2 + dev: true + + /parse5-htmlparser2-tree-adapter@7.0.0: + resolution: {integrity: sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==} + dependencies: + domhandler: 5.0.3 + parse5: 7.1.2 + dev: true + + /parse5@7.1.2: + resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==} + dependencies: + entities: 4.5.0 + dev: true + + /path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + dev: true + + /path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + dev: true + + /path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + dev: true + + /path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + dev: true + + /pend@1.2.0: + resolution: {integrity: 
sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==} + dev: true + + /picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + dev: true + + /prebuild-install@7.1.1: + resolution: {integrity: sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==} + engines: {node: '>=10'} + hasBin: true + dependencies: + detect-libc: 2.0.2 + expand-template: 2.0.3 + github-from-package: 0.0.0 + minimist: 1.2.8 + mkdirp-classic: 0.5.3 + napi-build-utils: 1.0.2 + node-abi: 3.47.0 + pump: 3.0.0 + rc: 1.2.8 + simple-get: 4.0.1 + tar-fs: 2.1.1 + tunnel-agent: 0.6.0 + dev: true + optional: true + + /prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + dev: true + + /process-nextick-args@2.0.1: + resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + dev: true + + /pump@3.0.0: + resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==} + dependencies: + end-of-stream: 1.4.4 + once: 1.4.0 + dev: true + optional: true + + /punycode@2.3.0: + resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} + engines: {node: '>=6'} + dev: true + + /qs@6.11.2: + resolution: {integrity: sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA==} + engines: {node: '>=0.6'} + dependencies: + side-channel: 1.0.4 + dev: true + + /queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + dev: true + + /rc@1.2.8: + resolution: {integrity: 
sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} + hasBin: true + dependencies: + deep-extend: 0.6.0 + ini: 1.3.8 + minimist: 1.2.8 + strip-json-comments: 2.0.1 + dev: true + optional: true + + /read@1.0.7: + resolution: {integrity: sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==} + engines: {node: '>=0.8'} + dependencies: + mute-stream: 0.0.8 + dev: true + + /readable-stream@2.3.8: + resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + dev: true + + /readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + dependencies: + inherits: 2.0.4 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + dev: true + optional: true + + /regexpp@3.2.0: + resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==} + engines: {node: '>=8'} + dev: true + + /resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + dev: true + + /reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + dev: true + + /rimraf@3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + hasBin: true + dependencies: + glob: 7.2.3 + dev: true + + /run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + 
dependencies: + queue-microtask: 1.2.3 + dev: true + + /safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + dev: true + + /sax@1.3.0: + resolution: {integrity: sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==} + dev: true + + /semver@5.7.2: + resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} + hasBin: true + dev: true + + /semver@7.5.4: + resolution: {integrity: sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==} + engines: {node: '>=10'} + hasBin: true + dependencies: + lru-cache: 6.0.0 + dev: true + + /setimmediate@1.0.5: + resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==} + dev: true + + /shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + dependencies: + shebang-regex: 3.0.0 + dev: true + + /shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + dev: true + + /side-channel@1.0.4: + resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.2.1 + object-inspect: 1.12.3 + dev: true + + /simple-concat@1.0.1: + resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==} + dev: true + optional: true + + /simple-get@4.0.1: + resolution: {integrity: sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==} + dependencies: + decompress-response: 6.0.0 + once: 1.4.0 + simple-concat: 1.0.1 + dev: true + 
optional: true + + /slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + dev: true + + /string_decoder@1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + dependencies: + safe-buffer: 5.1.2 + dev: true + + /strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + dependencies: + ansi-regex: 5.0.1 + dev: true + + /strip-json-comments@2.0.1: + resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} + engines: {node: '>=0.10.0'} + dev: true + optional: true + + /strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + dev: true + + /supports-color@5.5.0: + resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} + engines: {node: '>=4'} + dependencies: + has-flag: 3.0.0 + dev: true + + /supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + dependencies: + has-flag: 4.0.0 + dev: true + + /tar-fs@2.1.1: + resolution: {integrity: sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==} + dependencies: + chownr: 1.1.4 + mkdirp-classic: 0.5.3 + pump: 3.0.0 + tar-stream: 2.2.0 + dev: true + optional: true + + /tar-stream@2.2.0: + resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} + engines: {node: '>=6'} + dependencies: + bl: 4.1.0 + end-of-stream: 1.4.4 + fs-constants: 1.0.0 + inherits: 2.0.4 + 
readable-stream: 3.6.2 + dev: true + optional: true + + /text-table@0.2.0: + resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} + dev: true + + /tmp@0.2.1: + resolution: {integrity: sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==} + engines: {node: '>=8.17.0'} + dependencies: + rimraf: 3.0.2 + dev: true + + /to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + dependencies: + is-number: 7.0.0 + dev: true + + /tslib@1.14.1: + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + dev: true + + /tsutils@3.21.0(typescript@5.1.3): + resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} + engines: {node: '>= 6'} + peerDependencies: + typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' + dependencies: + tslib: 1.14.1 + typescript: 5.1.3 + dev: true + + /tunnel-agent@0.6.0: + resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==} + dependencies: + safe-buffer: 5.1.2 + dev: true + optional: true + + /tunnel@0.0.6: + resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==} + engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'} + dev: true + + /type-check@0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + dependencies: + prelude-ls: 1.2.1 + dev: true + + /type-fest@0.20.2: + resolution: {integrity: 
sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} + engines: {node: '>=10'} + dev: true + + /typed-rest-client@1.8.11: + resolution: {integrity: sha512-5UvfMpd1oelmUPRbbaVnq+rHP7ng2cE4qoQkQeAqxRL6PklkxsM0g32/HL0yfvruK6ojQ5x8EE+HF4YV6DtuCA==} + dependencies: + qs: 6.11.2 + tunnel: 0.0.6 + underscore: 1.13.6 + dev: true + + /typescript@5.1.3: + resolution: {integrity: sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw==} + engines: {node: '>=14.17'} + hasBin: true + dev: true + + /uc.micro@1.0.6: + resolution: {integrity: sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==} + dev: true + + /underscore@1.13.6: + resolution: {integrity: sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==} + dev: true + + /uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + dependencies: + punycode: 2.3.0 + dev: true + + /url-join@4.0.1: + resolution: {integrity: sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==} + dev: true + + /util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + dev: true + + /which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + dependencies: + isexe: 2.0.0 + dev: true + + /wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + dev: true + + /xml2js@0.5.0: + resolution: {integrity: sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==} + engines: {node: '>=4.0.0'} + dependencies: + sax: 1.3.0 + xmlbuilder: 11.0.1 + dev: 
true + + /xmlbuilder@11.0.1: + resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==} + engines: {node: '>=4.0'} + dev: true + + /yallist@4.0.0: + resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} + dev: true + + /yauzl@2.10.0: + resolution: {integrity: sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==} + dependencies: + buffer-crc32: 0.2.13 + fd-slicer: 1.1.0 + dev: true + + /yazl@2.5.1: + resolution: {integrity: sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw==} + dependencies: + buffer-crc32: 0.2.13 + dev: true + + /yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + dev: true diff --git a/vscode-ext/snippets/glooSnippets.json b/vscode-ext/snippets/glooSnippets.json new file mode 100644 index 000000000..6c2a9a51c --- /dev/null +++ b/vscode-ext/snippets/glooSnippets.json @@ -0,0 +1,106 @@ +{ + "@case": { + "prefix": "@case", + "body": ["@case ${1:TestCaseName} {", "\t$0", "}"], + "description": "Snippet for @case block" + }, + "@client definition": { + "prefix": "@client", + "body": ["@client[${1:ModelType}] ${2:ClientName} {", "\t$0", "}"], + "description": "Snippet for @client definition" + }, + "@client type": { + "prefix": "@client", + "body": ["@client[${1:ModelType}] ${2:ClientName}"], + "description": "Snippet for @client type" + }, + "@class": { + "prefix": "@class", + "body": ["@class ${1:ClassName} {", "\t$0", "}"], + "description": "Snippet for @class definition" + }, + "@describe": { + "prefix": "@describe", + "body": ["@describe{$0}"], + "description": "Snippet for @describe" + }, + "@enum": { + "prefix": "@enum", + "body": ["@enum ${1:EnumName} {", "\t$0", "}"], + "description": "Snippet for @enum definition" 
+ }, + "@function": { + "prefix": "@function", + "body": [ + "@function ${1:FunctionName} {", + "\t@input ${2:InputType}", + "\t@output ${3:OutputType}", + "\t$0", + "}" + ], + "description": "Custom Gloo function snippet" + }, + "@input block": { + "prefix": "@input", + "body": ["@input {", "\t$0", "}"], + "description": "Snippet for @input block" + }, + "@input type": { + "prefix": "@input", + "body": ["@input $0"], + "description": "Snippet for @input type" + }, + "@lang block": { + "prefix": "@lang", + "body": ["@lang[${1:Language}] {", "\t$0", "}"], + "description": "Snippet for @prompt block" + }, + "@method block": { + "prefix": "@method", + "body": ["@method ${1:MethodName} {", "\t$0", "}"] + }, + "@output": { + "prefix": "@output", + "body": ["@output {", "\t$0", "}"], + "description": "Snippet for @output block" + }, + "@output type": { + "prefix": "@output", + "body": ["@output $0"], + "description": "Snippet for @output type" + }, + "@prompt block": { + "prefix": "@prompt", + "body": ["@prompt {", "\t$0", "}"], + "description": "Snippet for @prompt block" + }, + "@skip": { + "prefix": "@skip", + "body": ["@skip"], + "name": "@skip" + }, + "@stringify": { + "prefix": "@stringify", + "body": ["@stringify ${1:OutputTypeName} {", "\t$0", "}"], + "description": "Snippet for @stringify block" + }, + "@rename": { + "prefix": "@rename", + "body": ["@rename{$0}"], + "description": "Snippet for @rename" + }, + "@test_group": { + "prefix": "@test_group", + "body": ["@test_group ${1:GroupName} for ${2:FunctionName} {", "\t$0", "}"], + "description": "Snippet for @test_group block" + }, + "@variant": { + "prefix": "@variant", + "body": [ + "@variant[${1:ModelType}] ${2:VariantName} for ${3:FunctionName} {", + "\t$0", + "}" + ], + "description": "Snippet for @variant block" + } +} diff --git a/vscode-ext/src/GlooCodeLensProvider.ts b/vscode-ext/src/GlooCodeLensProvider.ts new file mode 100644 index 000000000..fca4e189e --- /dev/null +++ 
b/vscode-ext/src/GlooCodeLensProvider.ts @@ -0,0 +1,37 @@ +import * as vscode from "vscode"; + +export class GlooCodeLensProvider implements vscode.CodeLensProvider { + provideCodeLenses(document: vscode.TextDocument): vscode.CodeLens[] { + const codeLenses: vscode.CodeLens[] = []; + + let insideTestGroupBlock = false; + + let braceCount = 0; + for (let line = 0; line < document.lineCount; line++) { + const lineText = document.lineAt(line).text; + + // Detect the start of a test group + if (lineText.includes("@test_group")) { + insideTestGroupBlock = true; + } + + // If we're inside a test group and we find an @input line, add a code lens + if (insideTestGroupBlock && lineText.includes("@input")) { + const range = new vscode.Range(line, 0, line, lineText.length); + const command: vscode.Command = { + title: "▶️ Run Test", + command: "extension.runGlooTest", + arguments: [document.uri], + }; + codeLenses.push(new vscode.CodeLens(range, command)); + } + + // Detect the end of the test group block + if (insideTestGroupBlock && lineText.trim() === "}") { + insideTestGroupBlock = false; + } + } + + return codeLenses; + } +} diff --git a/vscode-ext/src/KeywordCompletionProvider.ts b/vscode-ext/src/KeywordCompletionProvider.ts new file mode 100644 index 000000000..e9ad60079 --- /dev/null +++ b/vscode-ext/src/KeywordCompletionProvider.ts @@ -0,0 +1,82 @@ +import * as vscode from "vscode"; + +const keywords = [ + "@test_group", + "@input", + "@alias", + "@description", + "@skip", + "@stringify", + "@client", + "@method", + "@lang", + "@provider", +]; + +const commitCharacters = [ + "a", + "b", + "c", + "d", + "e", + "f", + "g", + "h", + "i", + "j", + "k", + "l", + "m", + "n", + "o", + "p", + "q", + "r", + "s", + "t", + "u", + "v", + "w", + "x", + "y", + "z", + "_", +]; + +export class KeywordCompletionProvider + implements vscode.CompletionItemProvider +{ + provideCompletionItems( + document: vscode.TextDocument, + position: vscode.Position, + token: 
vscode.CancellationToken, + context: vscode.CompletionContext + ): vscode.ProviderResult { + const line = document.lineAt(position).text; + const prefix = line.slice(0, position.character); + const match = prefix.match(/@(\w*)$/); + + if (match) { + const [, userTyped] = match; + + const startPos = position.translate(0, -userTyped.length - 1); // -1 to account for "@" + const endPos = position.translate(0, line.length - position.character); + const replaceRange = new vscode.Range(startPos, endPos); + + const completion = keywords + .filter((keyword) => keyword.startsWith(`@${userTyped}`)) + .map((keyword) => { + const item = new vscode.CompletionItem( + keyword, + vscode.CompletionItemKind.Keyword + ); + // item.insertText = keyword.slice(1); + item.range = replaceRange; + item.filterText = "@"; + return item; + }); + console.log(completion); + return completion; + } + } +} diff --git a/vscode-ext/src/extension.ts b/vscode-ext/src/extension.ts new file mode 100644 index 000000000..f3e5a0844 --- /dev/null +++ b/vscode-ext/src/extension.ts @@ -0,0 +1,120 @@ +import * as vscode from "vscode"; +import { KeywordCompletionProvider } from "./KeywordCompletionProvider"; +import { GlooCodeLensProvider } from "./GlooCodeLensProvider"; + +const { exec } = require("child_process"); + +function runGlooTest(uri: vscode.Uri): void { + const terminal = vscode.window.createTerminal(`Gloo Test: ${uri.fsPath}`); + terminal.sendText(`cd ${uri.fsPath}`); + terminal.sendText("gloo_cli test"); + terminal.show(); +} + +export function activate(context: vscode.ExtensionContext) { + const selector: vscode.DocumentSelector = { + pattern: "**/*.gloo", + scheme: "file", + }; + const config = vscode.workspace.getConfiguration("gloo"); + const glooPath = config.get("path", "gloo"); + + let disposable = vscode.workspace.onDidSaveTextDocument((document) => { + if (document.fileName.endsWith(".gloo")) { + runBuildScript(document, glooPath); + } + }); + + context.subscriptions.push(disposable); +} + 
+const diagnosticsCollection = + vscode.languages.createDiagnosticCollection("gloo"); + +const outputChannel = vscode.window.createOutputChannel("Gloo"); + +function runBuildScript(document: vscode.TextDocument, glooPath: string): void { + let buildCommand = `${glooPath} build`; + + let workspaceFolder = vscode.workspace.getWorkspaceFolder(document.uri); + + if (!workspaceFolder) { + return; + } + let options = { + cwd: workspaceFolder.uri.fsPath, + }; + + exec( + buildCommand, + options, + (error: Error | null, stdout: string, stderr: string) => { + if (stdout) { + outputChannel.appendLine(stdout); + } + if (error) { + vscode.window + .showErrorMessage( + `Error running the build script: ${error}`, + "Show Details" + ) + .then((selection) => { + if (selection === "Show Details") { + outputChannel.appendLine( + `Error running the build script: ${error}` + ); + outputChannel.show(true); + } + }); + return; + } + if (stderr) { + // Parse the error message to extract line number + const lineMatch = stderr.match(/Line (\d+),/); + if (lineMatch) { + const lineNumber = parseInt(lineMatch[1], 10) - 1; + + const range = new vscode.Range( + lineNumber, + 0, + lineNumber, + Number.MAX_VALUE + ); + const diagnostic = new vscode.Diagnostic( + range, + stderr, + vscode.DiagnosticSeverity.Error + ); + + diagnosticsCollection.set(document.uri, [diagnostic]); + } else { + vscode.window + .showErrorMessage(`Build error: ${stderr}`, "Show Details") + .then((selection) => { + if (selection === "Show Details") { + const outputChannel = vscode.window.createOutputChannel("Gloo"); + outputChannel.appendLine( + `Error running the build script: ${stderr}` + ); + outputChannel.show(true); + } + }); + } + return; + } + + // Clear any diagnostics if the build was successful + diagnosticsCollection.clear(); + + const infoMessage = vscode.window.showInformationMessage( + "Gloo build was successful" + ); + + setTimeout(() => { + infoMessage.then; + }, 5000); + } + ); +} + +export function 
deactivate() {} diff --git a/vscode-ext/src/test/runTest.ts b/vscode-ext/src/test/runTest.ts new file mode 100644 index 000000000..8a6ab0e15 --- /dev/null +++ b/vscode-ext/src/test/runTest.ts @@ -0,0 +1,23 @@ +import * as path from "path"; + +import { runTests } from "@vscode/test-electron"; + +async function main() { + try { + // The folder containing the Extension Manifest package.json + // Passed to `--extensionDevelopmentPath` + const extensionDevelopmentPath = path.resolve(__dirname, "../../"); + + // The path to test runner + // Passed to --extensionTestsPath + const extensionTestsPath = path.resolve(__dirname, "./suite/index"); + + // Download VS Code, unzip it and run the integration test + await runTests({ extensionDevelopmentPath, extensionTestsPath }); + } catch (err) { + console.error("Failed to run tests", err); + process.exit(1); + } +} + +main(); diff --git a/vscode-ext/src/test/suite/extension.test.ts b/vscode-ext/src/test/suite/extension.test.ts new file mode 100644 index 000000000..2f671d3c7 --- /dev/null +++ b/vscode-ext/src/test/suite/extension.test.ts @@ -0,0 +1,15 @@ +import * as assert from "assert"; + +// You can import and use all API from the 'vscode' module +// as well as import your extension to test it +import * as vscode from "vscode"; +// import * as myExtension from '../../extension'; + +suite("Extension Test Suite", () => { + vscode.window.showInformationMessage("Start all tests."); + + test("Sample test", () => { + assert.strictEqual(-1, [1, 2, 3].indexOf(5)); + assert.strictEqual(-1, [1, 2, 3].indexOf(0)); + }); +}); diff --git a/vscode-ext/src/test/suite/index.ts b/vscode-ext/src/test/suite/index.ts new file mode 100644 index 000000000..f0e27081d --- /dev/null +++ b/vscode-ext/src/test/suite/index.ts @@ -0,0 +1,40 @@ +// import * as path from "path"; +// import * as Mocha from "mocha"; +// import * as glob from "glob"; + +// export function run(): Promise { +// // Create the mocha test +// const mocha = new Mocha({ +// ui: "tdd", 
+// color: true, +// }); + +// const testsRoot = path.resolve(__dirname, ".."); + +// return new Promise((c, e) => { +// const testFiles = new glob.Glob("**/**.test.js", { cwd: testsRoot }); +// const testFileStream = testFiles.stream(); + +// testFileStream.on("data", (file) => { +// mocha.addFile(path.resolve(testsRoot, file)); +// }); +// testFileStream.on("error", (err) => { +// e(err); +// }); +// testFileStream.on("end", () => { +// try { +// // Run the mocha test +// mocha.run((failures) => { +// if (failures > 0) { +// e(new Error(`${failures} tests failed.`)); +// } else { +// c(); +// } +// }); +// } catch (err) { +// console.error(err); +// e(err); +// } +// }); +// }); +// } diff --git a/vscode-ext/syntaxes/gloo.tmLanguage.json b/vscode-ext/syntaxes/gloo.tmLanguage.json new file mode 100644 index 000000000..d624d88be --- /dev/null +++ b/vscode-ext/syntaxes/gloo.tmLanguage.json @@ -0,0 +1,606 @@ +{ + "fileTypes": ["gloo"], + "name": "Gloo", + "patterns": [ + { "include": "#variantGroup" }, + { "include": "#enumGroup" }, + { "include": "#classGroup" }, + { "include": "#functionGroup" }, + { "include": "#testGroupGroup" }, + { "include": "#clientDefinitionGroup" }, + { "include": "#keywordPattern" }, + { "include": "#backupKeywordPattern" } + ], + "repository": { + "environmentVariablePattern": { + "match": ".*\\s*(@ENV\\.[_\\w]*)", + "captures": { + "1": { + "name": "variable.environment.gloo" + } + } + }, + "dependsOnPhrasePattern": { + "begin": "(@depends_on)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + } + }, + "patterns": [ + { + "match": "\\b(\\w+)\\b", + "captures": { + "1": { + "name": "entity.name.function.gloo" + } + } + } + ] + }, + "dependsOnWordPattern": { + "match": "@depends_on", + "name": "keyword.control.gloo" + }, + "dependsOnGroup": { + "patterns": [ + { "include": "#dependsOnPhrasePattern" }, + { "include": "#dependsOnWordPattern" } + ] + }, + "nestedBrace": { + "begin": "\\{", + "end": 
"\\}", + "patterns": [ + { + "include": "#nestedBrace" + } + ] + }, + "langPyPhrasePattern": { + "begin": "(@lang\\[py])\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + } + }, + "name": "source.gloo.embedded.python", + "patterns": [ + { + "include": "source.python" + } + ] + }, + "langPhrasePattern": { + "begin": "(@lang\\[\\w+\\])\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + } + }, + "patterns": [ + { + "include": "#nestedBrace" + } + ] + }, + "langWordPattern": { + "match": "@lang(\\[(\\w*\\])?)?", + "name": "keyword.control.gloo" + }, + "langGroup": { + "patterns": [ + { "include": "#langPyPhrasePattern" }, + { "include": "#langPhrasePattern" }, + { "include": "#langWordPattern" } + ] + }, + "methodPhrasePattern": { + "begin": "(@method)\\s+(\\w+[_\\w]*)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.function.gloo" + } + }, + "patterns": [{ "include": "#langGroup" }] + }, + "methodWordPattern": { + "match": "(@method)\\s+(\\w+[_\\w]*)?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.function.gloo" + } + } + }, + "methodGroup": { + "patterns": [ + { "include": "#methodPhrasePattern" }, + { "include": "#methodWordPattern" } + ] + }, + + "inputTypePattern": { + "begin": "^\\s*@input\\s+", + "end": "$", + "patterns": [ + { + "include": "#primitivePattern" + }, + { + "match": "\\b\\w+", + "name": "entity.name.class.gloo" + } + ] + }, + "outputTypePattern": { + "begin": "^\\s*@output\\s+", + "end": "$", + "patterns": [ + { + "include": "#primitivePattern" + }, + { + "match": "\\b\\w+", + "name": "entity.name.class.gloo" + } + ] + }, + "inputStringPattern": { + "match": "^\\s*@input\\s+(.+)$", + "captures": { + "1": { + "name": "string.unquoted.gloo" + } + } + }, + "inputMultilineStringPattern": { + "begin": "^\\s*@input\\s*\\{", + "end": "\\}", + 
"contentName": "string.unquoted.multiline.gloo" + }, + "inputPythonPattern": { + "begin": "^\\s*@input\\[py\\]\\s*\\{", + "end": "\\}", + "contentName": "source.python", + "patterns": [ + { + "include": "source.python" + } + ] + }, + "inputStringGroup": { + "patterns": [ + { "include": "#inputPythonPattern" }, + { "include": "#inputMultilineStringPattern" }, + { "include": "#inputStringPattern" } + ] + }, + "stringVariablePattern": { + "match": "\\{[^}]+\\}", + "name": "variable.string.gloo" + }, + "promptPattern": { + "match": "^\\s*@prompt\\s+(.*)$", + "captures": { + "1": { + "name": "string.unquoted.multiline.gloo" + } + } + }, + "promptMultilinePattern": { + "begin": "^\\s*@prompt\\s*\\{", + "end": "\\}", + "contentName": "string.unquoted.multiline.gloo", + "patterns": [ + { + "include": "#stringVariablePattern" + }, + { "include": "#nestedBrace" } + ] + }, + "promptGroup": { + "patterns": [ + { "include": "#promptMultilinePattern" }, + { "include": "#promptPattern" } + ] + }, + "clientTypePattern": { + "match": "@client\\[\\w+\\]\\s+(\\w+)", + "captures": { + "1": { + "name": "storage.type.gloo" + } + } + }, + "classVariablePattern": { + "begin": "\\w+\\s+", + "end": "$", + "patterns": [ + { + "include": "#primitivePattern" + }, + { + "match": "\\b\\w+", + "name": "entity.name.class.gloo" + } + ] + }, + "clientVariablePattern": { + "match": "^(@?[_\\w]+)\\s+(@?[_\\w\\-\\.]+(?:\\[\\]|\\?)?)", + "captures": { + "1": { + "name": "text.plain.gloo" + }, + "2": { + "name": "storage.type.gloo" + } + } + }, + "casePhrasePattern": { + "begin": "(@case)\\s+(\\w+)*\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + }, + "patterns": [ + { "include": "#inputStringGroup" }, + { "include": "#methodGroup" } + ] + }, + "caseWordPattern": { + "match": "(@case)(?:\\s+(\\w+))?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } 
+ } + }, + "caseGroup": { + "patterns": [ + { "include": "#casePhrasePattern" }, + { "include": "#caseWordPattern" } + ] + }, + "renamePhrasePattern": { + "begin": "(@rename)\\s*\\{", + "end": "\\}", + "name": "string.unquoted.gloo", + "captures": { + "0": { + "name": "keyword.control.gloo" + } + }, + "patterns": [{ "include": "#stringVariablePattern" }] + }, + "renameWordPattern": { + "match": "@rename\\b", + "name": "keyword.control.gloo" + }, + "renameGroup": { + "patterns": [ + { "include": "#renamePhrasePattern" }, + { "include": "#renameWordPattern" } + ] + }, + "describePhrasePattern": { + "begin": "\\s*(@describe)\\s*\\{", + "end": "\\}", + "name": "string.unquoted.gloo", + "captures": { + "0": { + "name": "keyword.control.gloo" + } + }, + "patterns": [{ "include": "#stringVariablePattern" }] + }, + "describeWordPattern": { + "match": "@describe\\b", + "name": "keyword.control.gloo" + }, + "describeGroup": { + "patterns": [ + { "include": "#describePhrasePattern" }, + { "include": "#describeWordPattern" } + ] + }, + "skipPattern": { + "match": "\\s+@skip\\s*$", + "name": "keyword.control.skip.gloo" + }, + "stringifyPhrasePattern": { + "begin": "(@stringify)\\s+(\\w+)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + }, + "patterns": [ + { "include": "#renameGroup" }, + { "include": "#describeGroup" }, + { "include": "#skipPattern" } + ] + }, + "stringifyWordPattern": { + "match": "(@stringify)(\\s+\\w+)?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + } + }, + "stringifyGroup": { + "patterns": [ + { "include": "#stringifyPhrasePattern" }, + { "include": "#stringifyWordPattern" } + ] + }, + "testGroupPhrasePattern": { + "begin": "^\\s*(@test_group)\\s+([_\\w]+)\\s+(for)\\s+([_\\w]+)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": 
"entity.name.class.gloo" + }, + "3": { + "name": "keyword.control.gloo" + }, + "4": { + "name": "entity.name.function.gloo" + } + }, + "patterns": [ + { "include": "#inputStringGroup" }, + { "include": "#caseGroup" }, + { "include": "#methodGroup" } + ] + }, + "testGroupWordPattern": { + "match": "(@test_group)\\s+(?:(?!for\\b)(\\w+)\\s+(?:(for)(\\s+\\w+)?)?)?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + }, + "3": { + "name": "keyword.control.gloo" + }, + "4": { + "name": "entity.name.function.gloo" + } + } + }, + "testGroupGroup": { + "patterns": [ + { "include": "#testGroupPhrasePattern" }, + { "include": "#testGroupWordPattern" } + ] + }, + "functionPhrasePattern": { + "begin": "(@function)\\s+(\\w+)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.function.custom.gloo" + } + }, + "patterns": [ + { "include": "#inputTypePattern" }, + { "include": "#outputTypePattern" } + ] + }, + "functionWordPattern": { + "match": "(@function)(?:\\s+(\\w+))?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.function.custom.gloo" + } + } + }, + "functionGroup": { + "patterns": [ + { "include": "#functionPhrasePattern" }, + { "include": "#functionWordPattern" } + ] + }, + "classPhrasePattern": { + "begin": "(@class)\\s+(\\w+)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + }, + "patterns": [ + { + "include": "#methodPattern" + }, + { "include": "#promptPattern" }, + { "include": "#methodGroup" }, + { "include": "#classVariablePattern" }, + { "include": "#primitivePattern" } + ] + }, + "classWordPattern": { + "begin": "(@class)(?:\\s+([\\w]+))?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + } + }, + "classGroup": { + "patterns": [ + { 
"include": "#classPhrasePattern" }, + { "include": "#classWordPattern" } + ] + }, + "variantPhrasePattern": { + "begin": "^\\s*(@variant\\[\\w+\\])\\s+(\\w+[_\\w]*)\\s+(for)\\s+(\\w+)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + }, + "3": { + "name": "keyword.control.gloo" + }, + "4": { + "name": "entity.name.function.gloo" + } + }, + "patterns": [ + { "include": "#promptGroup" }, + { "include": "#clientTypePattern" }, + { "include": "#dependsOnGroup" }, + { "include": "#stringifyGroup" }, + { "include": "#methodGroup" } + ] + }, + "variantWordPattern": { + "match": "(@variant(?:\\[\\w*\\]?)?)\\s+(?:(?!for\\b)(\\w+)\\s+(?:(for)(\\s+\\w+)?)?)?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + }, + "3": { + "name": "keyword.control.gloo" + }, + "4": { + "name": "entity.name.function.gloo" + } + } + }, + "variantGroup": { + "patterns": [ + { "include": "#variantPhrasePattern" }, + { "include": "#variantWordPattern" } + ] + }, + "enumPattern": { + "begin": "(@enum)\\s+(\\w+)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + } + }, + "enumWordPattern": { + "match": "(@enum)(\\s+\\w+)?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + } + }, + "enumGroup": { + "patterns": [ + { "include": "#enumPhrasePattern" }, + { "include": "#enumWordPattern" } + ] + }, + "clientDefinitionPhrasePattern": { + "begin": "(@client\\[\\w+\\])\\s+([_\\w]+)\\s*\\{", + "end": "\\}", + "beginCaptures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + }, + "patterns": [ + { "include": "#environmentVariablePattern" }, + { "include": "#clientVariablePattern" } + ] + }, + "clientDefinitionWordPattern": { + "match": 
"(@client(?:\\[\\w*\\]?)?)(\\s+\\w+)?", + "captures": { + "1": { + "name": "keyword.control.gloo" + }, + "2": { + "name": "entity.name.class.gloo" + } + } + }, + "clientDefinitionGroup": { + "patterns": [ + { "include": "#clientDefinitionPhrasePattern" }, + { "include": "#clientDefinitionWordPattern" } + ] + }, + "backupKeywordPattern": { + "match": "\\s*(@method|@lang\\[\\w*\\]|@method|@describe|@rename|@function|@stringify|@prompt|)", + "name": "invalid.illegal.unrecognized-keyword.gloo" + }, + "primitivePattern": { + "match": "\\b(char|string|int|float|bool|null)\\b(\\[\\]|\\?)?", + "captures": { + "1": { + "name": "storage.type.gloo" + }, + "2": { + "name": "text.plain.gloo" + } + } + } + }, + "scopeName": "source.gloo" +} diff --git a/vscode-ext/tsconfig.json b/vscode-ext/tsconfig.json new file mode 100644 index 000000000..daa054efb --- /dev/null +++ b/vscode-ext/tsconfig.json @@ -0,0 +1,12 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "ES6", + "lib": ["es6"], + "outDir": "out", + "sourceMap": true, + "strict": true, + "rootDir": "src" + }, + "exclude": ["node_modules", ".vscode-test"] +}