├── theme
│   ├── header.hbs
│   └── head.hbs
├── src
│   ├── introduction
│   │   ├── contributing.md
│   │   └── authors.md
│   ├── general-concepts
│   │   ├── temperature
│   │   │   └── README.md
│   │   ├── load-balancing
│   │   │   ├── gateway
│   │   │   │   └── README.md
│   │   │   ├── README.md
│   │   │   ├── reverse-proxy
│   │   │   │   └── README.md
│   │   │   └── forward-proxy
│   │   │       └── README.md
│   │   ├── large-language-model
│   │   │   └── README.md
│   │   ├── continuous-batching
│   │   │   └── README.md
│   │   ├── embedding
│   │   │   └── README.md
│   │   └── input-output
│   │       └── README.md
│   ├── deployments
│   │   ├── llama.cpp
│   │   │   ├── README.md
│   │   │   ├── aws-ec2-cuda
│   │   │   │   └── README.md
│   │   │   └── aws-image-builder
│   │   │       └── README.md
│   │   ├── paddler
│   │   │   └── README.md
│   │   └── ollama
│   │       └── README.md
│   ├── customization
│   │   ├── fine-tuning
│   │   │   └── README.md
│   │   └── retrieval-augmented-generation
│   │       └── README.md
│   ├── predictability
│   │   ├── structured-outputs
│   │   │   └── matching-grammar
│   │   │       └── README.md
│   │   └── README.md
│   ├── application-layer
│   │   ├── README.md
│   │   ├── architecture
│   │   │   └── long-running
│   │   │       └── README.md
│   │   └── optimization
│   │       └── asynchronous-programming
│   │           └── README.md
│   ├── README.md
│   └── SUMMARY.md
├── .gitignore
├── resources
│   └── ts
│       └── global_mermaid.ts
├── .gitattributes
├── package.json
├── .editorconfig
├── README.md
├── book.toml
├── Makefile
├── tsconfig.json
├── CONTRIBUTING
├── .github
│   └── workflows
│       └── github_pages.yml
├── mdbook-admonish.css
├── LICENSE
└── yarn.lock
/theme/header.hbs:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/introduction/contributing.md:
--------------------------------------------------------------------------------
1 | {{#include ../../CONTRIBUTING}}
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.pnp.cjs
2 | /.pnp.loader.mjs
3 | /.yarn
4 | /assets
5 | /book
6 |
--------------------------------------------------------------------------------
/theme/head.hbs:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/resources/ts/global_mermaid.ts:
--------------------------------------------------------------------------------
1 | import mermaid from "mermaid"
2 |
3 | mermaid.initialize({
4 | startOnLoad:true,
5 | theme: "neutral",
6 | });
7 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | /.yarn/** linguist-vendored
2 | /.yarn/releases/* binary
3 | /.yarn/plugins/**/* binary
4 | /.pnp.* binary linguist-generated
5 |
--------------------------------------------------------------------------------
/src/general-concepts/temperature/README.md:
--------------------------------------------------------------------------------
1 | # Temperature
2 |
3 | The temperature parameter in LLMs controls the randomness of the output. Lower temperatures make the output more deterministic (less creative), while higher temperatures increase variability. Even at low temperatures, some variability remains due to the probabilistic nature of the models.
4 |
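5 | The exact parameter name and scale depend on the runner and API you use. As a minimal sketch (assuming a local `llama.cpp` server listening on port 8080; the host, port, and prompt are placeholders), `temperature` can be passed alongside the prompt in a completion request:
6 |
7 | ```shell
8 | # Lower temperature -> output stays close to the most probable completion.
9 | curl http://127.0.0.1:8080/completion \
10 |     --header "Content-Type: application/json" \
11 |     --data '{
12 |         "prompt": "Write a one-sentence tagline for a coffee shop.",
13 |         "n_predict": 64,
14 |         "temperature": 0.2
15 |     }'
16 | ```
17 |
18 | Re-running the same request with `"temperature": 1.2` should produce noticeably more varied taglines between runs.
19 |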
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "llmops-handbook",
3 | "packageManager": "yarn@4.0.2",
4 | "dependencies": {
5 | "@hotwired/stimulus": "^3.2.2",
6 | "@hotwired/turbo": "^8.0.4",
7 | "mermaid": "^10.9.1",
8 | "tslib": "^2.6.3",
9 | "typescript": "^5.5.2"
10 | },
11 | "devDependencies": {
12 | "@types/hotwired__turbo": "^8",
13 | "esbuild": "^0.21.5"
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | end_of_line = lf
6 | insert_final_newline = true
7 | indent_style = space
8 | indent_size = 4
9 | trim_trailing_whitespace = true
10 |
11 | [*.example]
12 | indent_size = 2
13 |
14 | [*.go]
15 | indent_size = 4
16 | indent_style = tab
17 |
18 | [*.md]
19 | trim_trailing_whitespace = false
20 |
21 | [*.{css,scss,js,tf,ts,tsx,yml,yaml}]
22 | indent_size = 2
23 |
24 | [Dockerfile]
25 | indent_size = 2
26 |
27 | [Makefile]
28 | indent_style = tab
29 |
--------------------------------------------------------------------------------
/src/introduction/authors.md:
--------------------------------------------------------------------------------
1 | # Authors
2 |
3 | All contributions are welcome. If you want to contribute, follow our [contributing guideline](/introduction/contributing.html).
4 |
5 | - Mateusz Charytoniuk ([LinkedIn](https://www.linkedin.com/in/mateusz-charytoniuk/), [GitHub](https://github.com/distantmagic)) - Author of the original version of this handbook, project maintainer.
6 |
7 | ## Contributors
8 |
9 | If you want to be on this list, contribute a new article or substantially expand or update any existing article with new information.
10 |
--------------------------------------------------------------------------------
/src/deployments/llama.cpp/README.md:
--------------------------------------------------------------------------------
1 | # llama.cpp
2 |
3 | ```admonish
4 | GitHub Repository: <https://github.com/ggerganov/llama.cpp>
5 | ```
6 |
7 | Llama.cpp is a production-ready, open-source runner for various [Large Language Models](/general-concepts/large-language-model).
8 |
9 | It has an excellent built-in [server](https://github.com/ggerganov/llama.cpp/tree/master/examples/server) with HTTP API.
10 |
11 | In this handbook, we will use [Continuous Batching](/general-concepts/continuous-batching), which in practice allows handling parallel requests.
12 |
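13 | As a rough sketch of a typical setup (binary and flag names differ slightly between `llama.cpp` versions, so check `--help` for your build; the model path is a placeholder), the server can be started with several parallel slots like this:
14 |
15 | ```shell
16 | # -c sets the total context size; --parallel splits it into 4 slots,
17 | # so up to 4 completion requests can be processed at the same time.
18 | ./llama-server \
19 |     --model ./models/model.gguf \
20 |     --host 127.0.0.1 \
21 |     --port 8080 \
22 |     -c 8192 \
23 |     --parallel 4
24 | ```
25 |
26 | In some builds, continuous batching needs to be enabled explicitly (the `--cont-batching` flag), while newer builds enable it by default.
27 |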
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # LLMOps Handbook
2 |
3 | This is a handbook for LLM Operations (LLMOps) practitioners. It is a living document.
4 |
5 | For the rendered version, visit our website: https://llmops-handbook.distantmagic.com/
6 |
7 | ## Our Other Projects
8 |
9 | - [Paddler](https://github.com/distantmagic/paddler) - Stateful load balancer custom-tailored for llama.cpp
10 | - [Resonance](https://github.com/distantmagic/resonance) - Modern PHP framework for IO-intensive applications
11 |
12 | ## License
13 |
14 | [Creative Commons Attribution Share Alike 4.0 International](./LICENSE)
15 |
16 | ## Community
17 |
18 | Discord: https://discord.gg/kysUzFqSCK
19 |
--------------------------------------------------------------------------------
/src/customization/fine-tuning/README.md:
--------------------------------------------------------------------------------
1 | # Fine-tuning
2 |
3 | Fine-tuning is taking a pre-trained model and further training it on a new task. This is typically useful when you want to repurpose a model trained on a large-scale dataset for a new task with less data available.
4 |
5 | In practice, that means fine-tuning allows the model to adapt to the new data without forgetting what it has learned before.
6 |
7 | A good example might be the [sqlcoder](https://github.com/defog-ai/sqlcoder) model: the general-purpose coding model [starcoder](https://github.com/bigcode-project/starcoder) fine-tuned to be exceptionally good at producing SQL.
8 |
--------------------------------------------------------------------------------
/book.toml:
--------------------------------------------------------------------------------
1 | [book]
2 | authors = ["Mateusz Charytoniuk"]
3 | language = "en"
4 | multilingual = false
5 | src = "src"
6 | title = "LLMOps Handbook (work in progress)"
7 |
8 | [output.html]
9 | additional-js = [
10 | "assets/global_mermaid.js"
11 | ]
12 | default-theme = "ayu"
13 | edit-url-template = "https://github.com/distantmagic/llmops-handbook/edit/main/{path}"
14 | git-repository-url = "https://github.com/distantmagic/llmops-handbook/tree/main"
15 | preferred-dark-theme = "ayu"
16 | smart-punctuation = true
17 | additional-css = ["./mdbook-admonish.css"]
18 |
19 | [preprocessor.mermaid]
20 | command = "mdbook-mermaid"
21 |
22 | [preprocessor.admonish]
23 | command = "mdbook-admonish"
24 | assets_version = "3.0.2" # do not edit: managed by `mdbook-admonish install`
25 |
--------------------------------------------------------------------------------
/src/deployments/paddler/README.md:
--------------------------------------------------------------------------------
1 | # Paddler
2 |
3 | ```admonish
4 | Additional note from the author of the handbook:
5 |
6 | Paddler is my personal project and is not part of llama.cpp itself, but I am including it here because it is a useful tool for deploying llama.cpp in production. It helped me, and I hope it helps you too.
7 | ```
8 |
9 | ```admonish
10 | GitHub Repository: <https://github.com/distantmagic/paddler>
11 | ```
12 |
13 | Paddler is an open-source, stateful load balancer and reverse proxy designed for servers running `llama.cpp`. Unlike typical strategies such as round-robin or least connections, Paddler distributes requests based on each server's available slots.
14 |
15 | It uses agents to monitor the health of `llama.cpp` instances and dynamically adjust to adding or removing servers, making it easier to integrate with autoscaling tools.
16 |
--------------------------------------------------------------------------------
/src/general-concepts/load-balancing/gateway/README.md:
--------------------------------------------------------------------------------
1 | # Gateway
2 |
3 | Functionally, forward and reverse proxies are similar in that they both act as intermediaries handling requests. The key difference lies in the direction of the request. A forward proxy is used by clients to forward requests to other servers, often to bypass network restrictions or for caching, and it announces its presence to the end user. In contrast, a reverse proxy is used by servers: it forwards client requests to backend servers and returns their responses, often for load balancing, security, and caching, and it hides its presence from the end user.
4 |
5 | When combined, forward and reverse proxies can create a gateway. A gateway serves as a front-end for underlying services and acts as an entry point for users to the application. It handles both incoming client requests and outgoing server responses.
6 |
--------------------------------------------------------------------------------
/src/general-concepts/large-language-model/README.md:
--------------------------------------------------------------------------------
1 | # Large Language Model
2 |
3 | >
4 | > Everyone can see what a horse is.
5 | >
6 | > -- [Benedykt Chmielowski](https://en.wikipedia.org/wiki/Benedykt_Chmielowski), [Nowe Ateny](https://en.wikipedia.org/wiki/Nowe_Ateny) (1745)
7 | >
8 |
9 | For practical purposes, in this handbook, any AI model that can handle user prompts to produce human-like text and follow instructions is considered a "large language model." This includes GPT, Llama, and any other models that may be developed in the future.
10 |
11 | These models are typically trained on a large corpus of text data and can generate human-like text in response to user prompts.
12 |
13 | In this handbook, we will discuss how to use these models, fine-tune them, evaluate their performance, and build applications around them.
14 |
15 | We will not focus on how to create new Large Language Models nor on their internal architecture besides the basics.
16 |
--------------------------------------------------------------------------------
/src/customization/retrieval-augmented-generation/README.md:
--------------------------------------------------------------------------------
1 | # Retrieval Augmented Generation
2 |
3 | Retrieval augmented generation does not modify the underlying model in any way. Instead, it is an approach to directly influence its responses.
4 |
5 | In practice, and in a significant simplification, RAG is about injecting data into [Large Language Model](/general-concepts/large-language-model) prompt.
6 |
7 | For example, let's say the user asks the LLM:
8 | - `What are the latest articles on our website?`
9 |
10 | To augment the response, you need to intercept the user's question and tell the LLM to respond in a way more or less like:
11 | - `You are a <persona, e.g. a helpful website assistant>. Tell the user that the latest articles on our site are: <list of articles fetched from the database>`
12 |
13 | That is greatly simplified, but generally, that is how it works. Along the way, [embeddings](/general-concepts/embedding) and [vector databases](/general-concepts/vector-database) are involved.
14 |
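15 | A minimal sketch of that interception step (assuming a local `llama.cpp` server on port 8080, `jq` for building the JSON body, and a hypothetical `fetch_latest_articles.sh` script that queries your database; all of these are illustrative placeholders):
16 |
17 | ```shell
18 | #!/usr/bin/env bash
19 | # 1. Retrieve the data the model should ground its answer in.
20 | ARTICLES="$(./fetch_latest_articles.sh)"
21 |
22 | # 2. Inject the retrieved data into the prompt before sending it to the LLM.
23 | PROMPT="You are a helpful assistant for our website.
24 | Using only the list below, tell the user what the latest articles are.
25 |
26 | Latest articles:
27 | ${ARTICLES}
28 |
29 | User question: What are the latest articles on our website?"
30 |
31 | # 3. Ask the model for a completion grounded in the injected context.
32 | curl http://127.0.0.1:8080/completion \
33 |     --header "Content-Type: application/json" \
34 |     --data "$(jq -n --arg p "$PROMPT" '{prompt: $p, n_predict: 256}')"
35 | ```
36 |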
--------------------------------------------------------------------------------
/src/general-concepts/load-balancing/README.md:
--------------------------------------------------------------------------------
1 | # Load Balancing
2 |
3 | Load balancing allows you to distribute the load (preferably evenly) among multiple servers.
4 |
5 | In this handbook, we assume that you intend to use GPU or TPU servers for inference. TPUs and GPUs present much the same class of benefits and issues, so we will use the term GPU to cover both.
6 |
7 | The interesting thing is that having some experience with 3D game development might help you get into LLMOps and resolve some GPU-related issues.
8 |
9 | ## Differences Between Balancing GPU and CPU Load
10 |
11 | In the context of LLMOps, the primary factors we have to deal with are [Input/Output](/general-concepts/input-output) bottlenecks instead of the usual CPU bottlenecks. That forces us to adjust how we design our infrastructure and applications.
12 |
13 | We will also often use a different set of metrics than in traditional load balancing, usually closer to the application level (like the number of context slots in use, the number of buffered application requests, and such).
14 |
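15 | For example, `llama.cpp` exposes an HTTP endpoint you can poll to drive balancing and autoscaling decisions instead of looking at raw GPU utilization. A sketch (assuming a server on port 8080; whether slot details are reported under `/health` or a separate `/slots` endpoint depends on the `llama.cpp` version and flags):
16 |
17 | ```shell
18 | # Basic liveness/readiness information:
19 | curl http://127.0.0.1:8080/health
20 |
21 | # Per-slot usage (idle vs processing), depending on the build and flags:
22 | curl http://127.0.0.1:8080/slots
23 | ```
24 |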
--------------------------------------------------------------------------------
/src/predictability/structured-outputs/matching-grammar/README.md:
--------------------------------------------------------------------------------
1 | # Matching the Grammar
2 |
3 | Some Large Language Model runners support [formal grammars](https://en.wikipedia.org/wiki/Formal_grammar). A grammar can be used to force the output to follow a certain structure (for example, speaking only in emojis or outputting only valid moves in [Portable Game Notation](https://en.wikipedia.org/wiki/Portable_Game_Notation)).
4 |
5 | It still does not guarantee that the output will be valid (in a semantic sense), but at least matching the formal grammar guarantees it will follow the correct structure.
6 |
7 | One of the popular uses is to force a Large Language Model to match a specific [JSON Schema](/predictability/structured-outputs/matching-json-schema).
8 |
9 | ## llama.cpp
10 |
11 | `llama.cpp` supports [GBNF formal grammars](https://github.com/ggerganov/llama.cpp/blob/master/grammars/README.md), an extension of [Backus-Naur form](https://en.wikipedia.org/wiki/Backus%E2%80%93Naur_form) with support for some [regular expression](https://en.wikipedia.org/wiki/Regular_expression) constructs.
12 |
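13 | As a small sketch (assuming a local `llama.cpp` server on port 8080), a GBNF grammar can be sent together with the completion request to constrain the output; here the grammar only allows the literal answers `yes` or `no`:
14 |
15 | ```shell
16 | curl http://127.0.0.1:8080/completion \
17 |     --header "Content-Type: application/json" \
18 |     --data '{
19 |         "prompt": "Is the Earth larger than the Moon? Answer yes or no.",
20 |         "n_predict": 4,
21 |         "grammar": "root ::= \"yes\" | \"no\""
22 |     }'
23 | ```
24 |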
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | ESBUILD_TARGET_DIRECTORY ?= assets
2 | CSS_ENTRYPOINTS := $(wildcard resources/ts/mdbook_*.ts)
3 | CSS_SOURCES := $(wildcard resources/css/*.css)
4 | TS_ENTRYPOINTS := $(wildcard resources/ts/global_*.ts)
5 | TS_SOURCES := $(wildcard resources/ts/*.ts)
6 |
7 | assets/global_mermaid.js: $(TS_SOURCES) .pnp.cjs
8 | yarnpkg run esbuild \
9 | --bundle \
10 | --asset-names="./[name]" \
11 | --entry-names="./[name]" \
12 | --format=iife \
13 | --loader:.jpg=file \
14 | --loader:.otf=file \
15 | --loader:.svg=file \
16 | --loader:.ttf=file \
17 | --loader:.webp=file \
18 | --minify \
19 | --outdir=$(ESBUILD_TARGET_DIRECTORY) \
20 | --sourcemap \
21 | --target=safari16 \
22 | --tree-shaking=true \
23 | --tsconfig=tsconfig.json \
24 | $(TS_ENTRYPOINTS) \
25 | ;
26 |
27 | .pnp.cjs: yarn.lock
28 | yarnpkg install --immutable
29 | touch .pnp.cjs
30 |
31 | .PHONY: build
32 | build: assets/global_mermaid.js
33 | mdbook build
34 |
35 | .PHONY: clean
36 | clean:
37 | rm -rf ./assets
38 |
39 | .PHONY: serve
40 | serve: assets/global_mermaid.js
41 | mdbook serve \
42 | --hostname 127.0.0.1 \
43 | --port 3000 \
44 | --watcher native
45 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "allowJs": true,
4 | "allowSyntheticDefaultImports": true,
5 | "alwaysStrict": true,
6 | "downlevelIteration": false,
7 | "emitDecoratorMetadata": true,
8 | "esModuleInterop": true,
9 | "experimentalDecorators": true,
10 | "forceConsistentCasingInFileNames": true,
11 | "importHelpers": true,
12 | "isolatedModules": true,
13 | "jsx": "react",
14 | "module": "esnext",
15 | "moduleResolution": "node",
16 | "noFallthroughCasesInSwitch": true,
17 | "noImplicitAny": true,
18 | "noImplicitReturns": true,
19 | "noImplicitThis": true,
20 | "noPropertyAccessFromIndexSignature": true,
21 | "noUncheckedIndexedAccess": true,
22 | "noUnusedLocals": true,
23 | "removeComments": true,
24 | "resolveJsonModule": true,
25 | "skipLibCheck": true,
26 | "sourceMap": true,
27 | "strict": true,
28 | "strictBindCallApply": true,
29 | "strictFunctionTypes": true,
30 | "strictNullChecks": true,
31 | "strictPropertyInitialization": false,
32 | "target": "es2022"
33 | },
34 | "include": [
35 | "resources/ts/**/*"
36 | ]
37 | }
38 |
--------------------------------------------------------------------------------
/src/general-concepts/continuous-batching/README.md:
--------------------------------------------------------------------------------
1 | # Continuous Batching
2 |
3 | Continuous Batching is an algorithm that allows LLM runners like `llama.cpp` to better utilize GPU processing time.
4 |
5 | It allows the server to handle multiple completion requests in parallel.
6 |
7 | ## Explanation and Demonstration
8 |
9 | The best person to explain how Continuous Batching is implemented in `llama.cpp` is the author of the library, Georgi Gerganov. Here is his tweet explaining the concept and demonstrating the algorithm's speed.
10 |
11 | If you want to dig even deeper, you can check out the [GitHub discussion](https://github.com/ggerganov/llama.cpp/discussions/4130#discussioncomment-8053636) that further explains this.
12 |
13 | > Serving 8 clients in parallel on A100 with llama.cpp (Model: Codellama 7B F16, System prompt: 305 tokens, Requests: 128, Max sequence length: 100, Continuous batching: enabled)
14 |
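15 | You can observe the effect yourself with the `llama-batched-bench` tool that ships with `llama.cpp`. An illustrative invocation (the same form is used in the benchmarking step of the [AWS EC2 tutorial](/deployments/llama.cpp/aws-ec2-cuda) later in this handbook; the model path is a placeholder, and the trailing list selects how many parallel sequences are benchmarked):
16 |
17 | ```shell
18 | ./llama-batched-bench -m ./models/model.gguf 2048 2048 512 0 999 128,256,512 128,256 1,2,4,8,16,32
19 | ```
20 |
21 | Comparing the throughput reported for 1 parallel sequence against 8 or 16 gives a rough sense of how much extra throughput continuous batching provides.
22 |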
--------------------------------------------------------------------------------
/src/general-concepts/embedding/README.md:
--------------------------------------------------------------------------------
1 | # Embedding
2 |
3 | Formally, an embedding represents a word (or a phrase) as a point in a vector space. In this space, words with similar meanings are close to each other.
4 |
5 | For example, the words "dog" and "cat" might be close to each other in the vector space because they are both animals.
6 |
7 | ## RGB Analogy
8 |
9 | Because embeddings can be vectors with 4096 or more dimensions, it might be hard to imagine them and get a good intuition on how they work in practice.
10 |
11 | A good analogy for getting an intuition about embeddings is to imagine them as points in 3D space first.
12 |
13 | Let's assume a color represented by RGB is our embedding. It is a 3D vector with 3 values: red, green, and blue representing 3 dimensions. Similar colors in that space are placed near each other. Red is close to orange, blue and green are close to teal, etc.
14 |
15 | Embeddings work similarly. Words and phrases are represented by vectors, and similar words are placed close to each other in the vector space.
16 |
17 | Searching for embeddings similar to a given one means looking for vectors placed close to that embedding in the vector space.
18 |
19 | 
20 |
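21 | As a sketch of how this looks in practice with `llama.cpp` (the exact flag and endpoint names vary slightly between versions, and the model path is a placeholder):
22 |
23 | ```shell
24 | # In one terminal: start the server with embeddings enabled
25 | # (in some builds the flag is spelled --embedding).
26 | ./llama-server --model ./models/model.gguf --port 8080 --embeddings
27 |
28 | # In another terminal: request an embedding vector for a phrase.
29 | # Similar phrases produce vectors that are close to each other.
30 | curl http://127.0.0.1:8080/embedding \
31 |     --header "Content-Type: application/json" \
32 |     --data '{"content": "a small dog"}'
33 | ```
34 |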
--------------------------------------------------------------------------------
/src/general-concepts/load-balancing/reverse-proxy/README.md:
--------------------------------------------------------------------------------
1 | # Reverse Proxy
2 |
3 | A reverse proxy server retrieves resources from one or more servers on a client's behalf. These resources are then returned to the client, appearing to originate from the source server itself. It abstracts your infrastructure setup from the end users, which is useful for implementing scaling, security middleware, and load balancing.
4 |
5 | While forward and reverse proxies may seem functionally similar, their differences lie primarily in their use cases and perspectives. A forward proxy acts on behalf of clients seeking resources from various servers, often used for client privacy and access control. A reverse proxy acts on behalf of servers, making resources available to clients while hiding the backend server details.
6 |
7 | That means a reverse proxy hides its presence from the clients and acts as an intermediary between them and the servers. When you communicate with a reverse proxy, it is as if you communicated directly with the target server.
8 |
9 | That is one of the primary differences between [forward proxy](/general-concepts/load-balancing/forward-proxy) and a reverse proxy.
10 |
11 | You can combine both [forward proxy](/general-concepts/load-balancing/forward-proxy) and reverse proxy to create a [gateway](/general-concepts/load-balancing/gateway).
12 |
13 | ## Paddler
14 |
15 | Paddler is a reverse proxy server and load balancer made specifically for `llama.cpp`. You can communicate with it like a regular `llama.cpp` instance. You can learn more on its [dedicated page](/deployments/paddler).
16 |
--------------------------------------------------------------------------------
/src/application-layer/README.md:
--------------------------------------------------------------------------------
1 | # Application Layer
2 |
3 | This chapter is not strictly related to LLMOps, but it is a good place to discuss best practices for architecting and developing applications that use LLMs.
4 |
5 | Those applications have to deal with issues that are not typically encountered in traditional web development: primarily long-running HTTP requests and, on the MLOps side, using custom models for inference.
6 |
7 | Up until [Large Language Models](/general-concepts/large-language-model) became mainstream and in demand by a variety of applications, the issue of dealing with long-running requests was much less prevalent. Typical microservice requests take 10ms or less, while waiting for a [Large Language Model](/general-concepts/large-language-model) to complete its inference can take multiple seconds.
8 |
9 | That calls for some adjustments in the application architecture: non-blocking [Input/Output](/general-concepts/input-output) and [asynchronous programming](/application-layer/optimization/asynchronous-programming).
10 |
11 | This is where languages with asynchronous runtimes shine: Python with its `asyncio` library, Rust with `tokio`, Go with its goroutines, etc.
12 |
13 | Programming languages like `PHP`, which are synchronous by default, might struggle unless supplemented by extensions like [Swoole](https://swoole.com/) (which essentially gives PHP Go-like coroutines) or libraries like [AMPHP](https://amphp.org/). Introducing support for asynchronous programming in PHP can be a challenge, but it is possible.
14 |
--------------------------------------------------------------------------------
/src/general-concepts/load-balancing/forward-proxy/README.md:
--------------------------------------------------------------------------------
1 | # Forward Proxy
2 |
3 | A forward proxy is an intermediary server between the client and the origin server. Clients connect to the forward proxy server and request a resource (such as a completion) available on a different server that is otherwise inaccessible to them. The forward proxy server retrieves the resource and forwards it to the client.
4 |
5 | You can combine both forward proxy and [reverse proxy](/general-concepts/load-balancing/reverse-proxy) to create a [gateway](/general-concepts/load-balancing/gateway).
6 |
7 | ## llama.cpp Forward Proxy
8 |
9 | llama.cpp implements its own [forward proxy](https://github.com/ggerganov/llama.cpp/tree/148ec970b62c3c5ae0a8bfdaad2fc237aaae350d/examples/rpc) in the form of an RPC server.
10 |
11 | It puts the `llama.cpp` server in front of multiple backends and distributes work among them.
12 |
13 | ```mermaid
14 | flowchart TD
15 | rpcb---|TCP|srva
16 | rpcb---|TCP|srvb
17 | rpcb-.-|TCP|srvn
18 | subgraph hostn[Host N]
19 | srvn[rpc-server]-.-backend3["Backend (CUDA,Metal,etc.)"]
20 | end
21 | subgraph hostb[Host B]
22 | srvb[rpc-server]---backend2["Backend (CUDA,Metal,etc.)"]
23 | end
24 | subgraph hosta[Host A]
25 | srva[rpc-server]---backend["Backend (CUDA,Metal,etc.)"]
26 | end
27 | subgraph host[Main Host]
28 | ggml[llama.cpp]---rpcb[RPC backend]
29 | end
30 | ```
31 |
32 |
--------------------------------------------------------------------------------
/CONTRIBUTING:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | First of all, every contribution is welcome.
4 |
5 | You do not have to add or improve articles to contribute. Even giving us suggestions or general ideas is valuable if you want to join in.
6 |
7 | To discuss the handbook contents, use [GitHub discussions](https://github.com/distantmagic/llmops-handbook/discussions).
8 |
9 | ## What are we looking for?
10 |
11 | This handbook is intended to be a living document that evolves with the community. It is aimed at more advanced LLM users who want to deploy scalable setups and/or be able to architect applications around them.
12 |
13 | It focuses primarily on runners like `llama.cpp` or `VLLM`, aimed at production usage. However, if you find an interesting use case for `aphrodite`, `tabby`, or any other runner, that is also welcome.
14 |
15 | Those are just general ideas. Anything related to the infrastructure, application layer, and tutorials is welcome. If you have an interesting approach to using LLMs, feel free to contribute it as well.
16 |
17 | ## How to contribute?
18 |
19 | We are using [GitHub issues](https://github.com/distantmagic/llmops-handbook/issues) and [pull requests](https://github.com/distantmagic/llmops-handbook/pulls) to organize the work.
20 |
21 | ### Submitting a new article
22 |
23 | If you want to submit an article:
24 | 1. Start a GitHub issue with an outline (with general points you want to cover) so we can decide together if it fits the handbook.
25 | 2. If the article fits the handbook, add a new page and create a pull request with a finished article.
26 |
27 | ### Updating an article
28 |
29 | If you want to improve an existing article, start an issue to let us know your thoughts or create a pull request if you are ready to add changes. Add an `improvement` tag to such an issue or pull request.
30 |
31 | ### Scrutiny
32 |
33 | If you think something in the handbook is incorrect, add a new issue with a `scrutiny` tag and point out the issues.
34 |
--------------------------------------------------------------------------------
/.github/workflows/github_pages.yml:
--------------------------------------------------------------------------------
1 | name: github pages
2 | on:
3 | push:
4 | branches:
5 | - main
6 |
7 | jobs:
8 | deploy:
9 | runs-on: ubuntu-latest
10 | permissions:
11 | contents: write
12 | pages: write
13 | id-token: write
14 | steps:
15 | - uses: actions/checkout@v4
16 | with:
17 | fetch-depth: 0
18 | - name: enable corepack before setting up node
19 | run: corepack enable
20 | - uses: actions/setup-node@v4
21 | with:
22 | node-version: 20
23 | - name: Install cargo
24 | uses: actions-rs/toolchain@v1
25 | with:
26 | toolchain: stable
27 | profile: minimal
28 | - name: cache cargo
29 | uses: actions/cache@v4
30 | with:
31 | path: |
32 | ~/.cargo/bin/
33 | ~/.cargo/registry/index/
34 | ~/.cargo/registry/cache/
35 | ~/.cargo/git/db/
36 | target/
37 | key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
38 | - name: Check and install mdbook if not already installed
39 | run: |
40 | if ! command -v mdbook &> /dev/null; then
41 | cargo install mdbook
42 | fi
43 | - name: Check and install mdbook-admonish if not already installed
44 | run: |
45 | if ! command -v mdbook-admonish &> /dev/null; then
46 | cargo install mdbook-admonish
47 | fi
48 | - name: Check and install mdbook-mermaid if not already installed
49 | run: |
50 | if ! command -v mdbook-mermaid &> /dev/null; then
51 | cargo install mdbook-mermaid
52 | fi
53 | - name: build book
54 | run: make build
55 | - name: Setup Pages
56 | uses: actions/configure-pages@v4
57 | - name: Upload artifact
58 | uses: actions/upload-pages-artifact@v3
59 | with:
60 | path: 'book'
61 | - name: Deploy to GitHub Pages
62 | id: deployment
63 | uses: actions/deploy-pages@v4
64 |
--------------------------------------------------------------------------------
/src/README.md:
--------------------------------------------------------------------------------
1 | # Introduction
2 |
3 | This handbook is a practical and advanced guide to LLMOps. It provides a solid understanding of large language models' general concepts, deployment techniques, and software engineering practices. With this knowledge, you will be prepared to maintain the entire stack confidently.
4 |
5 | This handbook focuses on LLM runners like `llama.cpp` or `VLLM`, which can scale and behave predictably in production infrastructure, rather than on runners aimed at casual use cases.
6 |
7 | It will teach you how to use large language models in professional applications, self-host Open-Source models, and build software around them. It goes beyond just [Retrieval Augmented Generation](/customization/retrieval-augmented-generation) and [Fine Tuning](/customization/fine-tuning).
8 |
9 | It assumes you are interested in self-hosting open source [Large Language Models](/general-concepts/large-language-model). If you only want to use them through HTTP APIs, you can jump straight to the [application layer](/application-layer) best practices.
10 |
11 | This is a living document, which means it will be updated regularly. To follow us, visit our [GitHub repository](https://github.com/distantmagic/llmops-handbook).
12 |
13 | ## What is LLMOps?
14 |
15 | `LLMOps` is a set of practices that deals with deploying, maintaining, and scaling [Large Language Models](/general-concepts/large-language-model). If you want to consider yourself an `LLMOps` practitioner, you should be able to, at minimum, deploy and maintain a scalable setup of multiple running LLM instances.
16 |
17 | ## New Class of Opportunities, New Class of Problems
18 |
19 | Although there has been a recent trend of naming everything `*Ops` (`DevOps`, `Product Ops`, `MLOps`, `LLMOps`, `BizOps`, etc.), `LLMOps` and `MLOps` truly deserve their place as a standalone set of practices.
20 |
21 | They bridge the gap between the applications and AI models deployed in the infrastructure. They also address specific issues arising from using GPUs and TPUs, with the primary emphasis on [Input/Output](/general-concepts/input-output) optimizations.
22 |
--------------------------------------------------------------------------------
/src/predictability/README.md:
--------------------------------------------------------------------------------
1 | # Predictability
2 |
3 | The common issue with [Large Language Models](/general-concepts/large-language-model) is the consistency and structure of outputs.
4 |
5 | ## Software Engineering vs AI
6 |
7 | The last few decades of IT developments have accustomed us to extreme predictability. Each time we call a specific API endpoint or use a specific button, the same thing happens consistently, under our complete control.
8 |
9 | That is not the case with AI, which operates on probabilities. That stems from the approach to creating software. Neural network designers design just the network and the training process, but they do not design the actual reasoning. The reasoning is learned by the network during training, and it is not under the control of the designer.
10 |
11 | That is totally different from the traditional software development process, where we design the reasoning and the process, and the software just executes it.
12 |
13 | That is why you might feed [Large Language Models](/general-concepts/large-language-model) with the same prompt multiple times and get different outputs each time. [Temperature](/general-concepts/temperature) parameter may be used to limit the "creativeness" of the model, but even setting it to zero does not guarantee predictable outputs.
14 |
15 | ## Structured Outputs
16 |
17 | While the fact that LLMs are not completely predictable may cause some issues, no technical solution is entirely one-sided, and we can turn that flexibility to our advantage.
18 |
19 | LLMs are extremely good at understanding natural language. In practice, we can finally communicate with computers in a way similar to how we communicate with other people. We can create systems that interpret such unstructured inputs and react to them in a structured and predictable way. This way, we can use the good parts of LLMs to our advantage and mitigate most of the unpredictability issues.
20 |
21 | ## Use Cases
22 |
23 | Some use cases include (but are not limited to):
24 | - Searching through unstructured documents (e.g., reports in .pdf, .doc, .csv, or plain text)
25 | - Converting emails into actionable structures (e.g., converting requests for quotes into API calls with parameters for internal systems)
26 | - Question answering systems that interpret the context of user queries
27 |
--------------------------------------------------------------------------------
/src/SUMMARY.md:
--------------------------------------------------------------------------------
1 | # Summary
2 |
3 | - [Introduction](README.md)
4 | - [Authors](./introduction/authors.md)
5 | - [Contributing](./introduction/contributing.md)
6 | - [General Concepts]()
7 | - [Continuous Batching](./general-concepts/continuous-batching/README.md)
8 | - [Embedding](./general-concepts/embedding/README.md)
9 | - [Input/Output](./general-concepts/input-output/README.md)
10 | - [Large Language Model](./general-concepts/large-language-model/README.md)
11 | - [Load Balancing](./general-concepts/load-balancing/README.md)
12 | - [Forward Proxy](./general-concepts/load-balancing/forward-proxy/README.md)
13 | - [Reverse Proxy](./general-concepts/load-balancing/reverse-proxy/README.md)
14 | - [Gateway](./general-concepts/load-balancing/gateway/README.md)
15 | - [Model Parameters]()
16 | - [Supervisor]()
17 | - [Temperature](./general-concepts/temperature/README.md)
18 | - [Vector Database]()
19 | - [Infrastructure]()
20 | - [llama.cpp](./deployments/llama.cpp/README.md)
21 | - [Installing on AWS EC2 with CUDA](./deployments/llama.cpp/aws-ec2-cuda/README.md)
22 | - [Installing with AWS Image Builder](./deployments/llama.cpp/aws-image-builder/README.md)
23 | - [Kubernetes]()
24 | - [Ollama](./deployments/ollama/README.md)
25 | - [Paddler](./deployments/paddler/README.md)
26 | - [VLLM]()
27 | - [Customization]()
28 | - [Fine-tuning](./customization/fine-tuning/README.md)
29 | - [Retrieval Augmented Generation](./customization/retrieval-augmented-generation/README.md)
30 | - [Predictability](./predictability/README.md)
31 | - [Hallucinations]()
32 | - [Consistent Outputs]()
33 | - [Structured Outputs]()
34 | - [Matching the JSON Schema]()
35 | - [Matching the Grammar](./predictability/structured-outputs/matching-grammar/README.md)
36 | - [Data Objects (including Pydantic)]()
37 | - [Function Calling]()
38 | - [Application Layer](./application-layer/README.md)
39 | - [Architecture]()
40 | - [Long-Running](./application-layer/architecture/long-running/README.md)
41 | - [Serverless]()
42 | - [Optimization]()
43 | - [Asynchronous Programming](./application-layer/optimization/asynchronous-programming/README.md)
44 | - [Input/Output Bottlenecks]()
45 | - [Tutorials]()
46 | - [LLM WebSocket chat with llama.cpp]()
47 | - [Serving completions with llama.cpp]()
48 |
--------------------------------------------------------------------------------
/src/general-concepts/input-output/README.md:
--------------------------------------------------------------------------------
1 | # Input/Output
2 |
3 | In the broad sense, an application can either wait for a CPU to finish processing something or for some external operation to complete (like a long-running HTTP request or waiting for some specific device to be ready).
4 |
5 | While the primary bottleneck in traditional applications and setups is often the CPU, when working with LLMs, it is the GPU and general Input/Output issues.
6 |
7 | For example, when working with an LLM's HTTP API, requests can take multiple seconds to complete. The same endpoints can have vastly varying response times. This can be due to the GPU being busy, the model being swapped out of memory, or the prompt itself.
8 |
9 | A lot of LLMOps issues are about working around those issues.
10 |
11 | ## Infrastructure
12 |
13 | Regarding the infrastructure, IO issues require us to use a different set of metrics than with CPU-bound applications.
14 |
15 | For example, if you are running a `llama.cpp` server with 8 available slots, using even 2-3 of them might put your GPU under strain. Thanks to [Continuous Batching](/general-concepts/continuous-batching/README.md) and the caching of generated tokens, the server might easily handle 5 more parallel requests, but metrics like the percentage of hardware usage will suggest that you have to scale up, which is not the case and might be a waste of resources.
16 |
17 | ## Application Layer
18 |
19 | While LLMOps primarily focuses on the infrastructure, on the application layer Input/Output issues make it extremely important to pick a programming language that supports concurrency or parallelism (asynchronicity or threaded execution) so that your application's execution is not blocked. Languages like JavaScript, Golang, Python with [asyncio](https://docs.python.org/3/library/asyncio.html), and Rust are good choices here.
20 |
21 | PHP can also be used, but I recommend complementing it with the [Swoole](https://swoole.com/) extension (which gives PHP Go-like coroutines) or the [AMPHP](https://amphp.org/) library. By default, PHP is synchronous and, combined with [FPM](https://www.php.net/manual/en/install.fpm.php), relies on a worker pool. Let's say you have 32 synchronous workers running in your application: all of them can be blocked by 32 parallel requests that each spend 20+ seconds on an HTTP request to the LLM. You might end up in a situation where your CPU is almost idling, but your server cannot handle more requests.
22 |
23 | The same applies to Python, but it has more mature built-in solutions to handle the same issues and gives easier access to multiprocessing and threading.
24 |
25 | You can read more in the [Application Layer](/application-layer/README.md) section.
26 |
--------------------------------------------------------------------------------
/src/application-layer/architecture/long-running/README.md:
--------------------------------------------------------------------------------
1 | # Long-Running
2 |
3 | In web development, there are two primary application models:
4 |
5 | 1. Long-running processes (for example, a web application written in `Go` that keeps running and the same process responds to multiple incoming requests)
6 | 2. Worker-based, single-threaded, synchronous (for example, `PHP` with `FPM`, `Ruby`, and some `Python` setups; this model is generally used by scripting languages)
7 |
8 | It is not necessarily connected to a specific language (for example, `PHP` can also start a long-running process with a web server, but most `PHP` frameworks were not designed with that application model in mind and without extensions like [Swoole](https://swoole.com/), it won't be preemptive).
9 |
10 | Python can run synchronously with some frameworks and `WSGI`, but it can also be run as a long-running application with ASGI or projects like [Granian](https://github.com/emmett-framework/granian).
11 |
12 | ## The Problem with the Worker-Based Synchronous Model
13 |
14 | We will use PHP-FPM as an example. On [Debian](https://www.debian.org/), it comes preconfigured with the `max_children` parameter set to `5` by default, which means it can spawn at most 5 workers and handle at most 5 requests in parallel. This parameter can be tweaked, and under normal circumstances, it can be set to a much higher value at the cost of additional RAM usage.
15 |
16 | Let's assume we have 32 workers running. Normally, the time to respond to a request takes at most milliseconds, and this runtime model is not an issue, but working with LLMs or ML models turns that around. Typical requests to LLM can take multiple seconds to complete, sometimes 20-30 seconds, depending on the number of tokens to be generated. That means if we have 32 requests in parallel to our application (which is not a lot), all the synchronous PHP workers can be blocked while waiting for tokens to be generated.
17 |
18 | We can end up in a situation where our server's CPU is almost idling, but it can't accept more requests due to an [Input/Output](/general-concepts/input-output) bottleneck.
19 |
20 | ```mermaid
21 | flowchart TD
22 | client[Client]
23 |
24 | client---|Request|response_handler
25 |
26 | subgraph worker[Worker]
27 | response_handler[Response Handler]---response["Response"]
28 | end
29 |
30 | subgraph llm[LLM]
31 | response_handler-->|Waits for|llmserver[Completion]
32 | llmserver-->response
33 | end
34 | ```
35 |
36 | While the tokens are generated, the worker is at a standstill and cannot accept additional requests.
37 |
38 | ## The Solution
39 |
40 | The solution is to use languages and frameworks that support long-running processes (that do not rely on spawning workers) and with any form of asynchronicity.
41 |
42 | The perfect example of such language is `Go` with its [goroutines](https://go.dev/tour/concurrency/1). Each goroutine uses just a few kilobytes of memory, and a server with a few gigabytes of RAM can potentially spawn millions of them. They run asynchronously and are preemptive, so there shouldn't be a situation where just a few requests can exhaust the entire server capacity.
43 |
--------------------------------------------------------------------------------
/src/application-layer/optimization/asynchronous-programming/README.md:
--------------------------------------------------------------------------------
1 | # Asynchronous Programming
2 |
3 | By asynchronous programming, we mean the ability to execute multiple tasks concurrently without blocking the main thread; that does not necessarily involve using threads and processes. A good example is the JavaScript execution model, which is, by default, single-threaded but asynchronous. It does not offer parallelism (without worker threads), but it can still issue concurrent network requests, database queries, etc.
4 |
5 | Considering that most of the bottlenecks related to working with Large Language Models stem from [Input/Output](/general-concepts/input-output) issues (primarily the LLM APIs response times and the time it takes to generate all the completion tokens) and not the CPU itself, asynchronous programming techniques are often the necessity when architecting the applications.
6 |
7 | When it comes to network requests, large language models pose a different challenge than most web applications. While most of the [REST](https://en.wikipedia.org/wiki/REST) APIs tend to have consistent response times below 100ms, when working with large language model web APIs, the response times might easily reach 20-30 seconds until all the requested tokens are generated and streamed.
8 |
9 | ## Affected Runtimes
10 |
11 | Scripting languages like PHP and Ruby are primarily affected because they are synchronous by default. That is especially cumbersome with PHP, which commonly uses an [FPM](https://www.php.net/manual/en/install.fpm.php) pool of workers as its hosting method. For example, Debian's default worker pool amounts to five workers. That means if each of them were busy handling 30-second requests, the sixth request would have to wait for the first one to finish. It also means you can easily run into a situation where your server's CPU is idling, but it cannot accept more requests simultaneously.
12 |
13 | ## Coroutines, Promises to the Rescue
14 |
15 | To mitigate the issue, you can use any programming language supporting async, which primarily manifests in supporting Promises (Futures) or Coroutines. That includes JavaScript, Golang, Python (with `asyncio`), and PHP (with `Swoole`).
16 |
17 | ## Preemptive vs Cooperative Scheduling
18 |
19 | It is also really important to understand the preemptive aspect of async languages. Although preemptiveness is an aspect primarily of threading, it plays a role when scheduling promises and coroutines. For example, PHP natively implements [Fibers](https://www.php.net/manual/en/language.fibers.php), which grants it some degree of asynchronicity, although they are not preemptive. This means if you try something silly in your code, like, for example:
20 |
21 | ```php
22 | // Illustrative sketch: blocking inside a Fiber. Fibers are cooperative,
23 | // so nothing can preempt this call; the whole process waits until sleep()
24 | // returns, and no other Fiber gets a chance to run in the meantime.
25 | $fiber = new Fiber(function (): void {
26 |     sleep(30);
27 | });
28 |
29 | $fiber->start(); // blocks here for 30 seconds
30 | ```
--------------------------------------------------------------------------------
/src/deployments/ollama/README.md:
--------------------------------------------------------------------------------
1 | # Ollama
2 |
3 | ```admonish
4 | GitHub Repository: <https://github.com/ollama/ollama>
5 | ```
6 |
7 | `Ollama` is a convenient and easy-to-use wrapper around `llama.cpp`.
8 |
9 | It acts like a `llama.cpp` multiplexer. You can start a new conversation or request completion from a specific LLM without manually downloading weights and setting them up.
10 |
11 | For example, when you request completion from a model that is not yet loaded, it checks if it is possible to fit that new model into RAM or VRAM. If so, it internally starts a new `llama.cpp` instance and uses it to load the LLM. If the requested model has not yet been downloaded, it will download it for you.
12 |
13 | In general terms, it acts like a `llama.cpp` [forward proxy](/general-concepts/load-balancing/forward-proxy.md) and a [supervisor](/general-concepts/load-balancing/supervisor.md).
14 |
15 | For example, if you load both `llama3` and `phi-3` into the same Ollama instance, you will get something like this:
16 |
17 | ```mermaid
18 | flowchart TD
19 | Ollama --> llama1[llama.cpp with llama3]
20 | Ollama --> llama2[llama.cpp with phi-3]
21 | llama1 --> VRAM
22 | llama2 --> VRAM
23 | ```
24 |
25 | ## Viability for Production
26 |
27 | ### Predictability
28 |
29 | Although the `Ollama` approach is convenient for local development, it causes some deployment problems (compared to `llama.cpp`).
30 |
31 | With `llama.cpp`, it is easily possible to divide the context of the loaded model into a specific number of slots, which makes it extremely easy to predict how many parallel requests the current server can handle.
32 |
33 | It is not easy to predict with `Ollama`. It manages the slots internally and does not expose an equivalent of the `llama.cpp` `/health` endpoint for monitoring the currently used resources. Even if it did, it is always possible to have a few different models loaded simultaneously that share server resources.
34 |
35 | We might end up in a situation where `Ollama` keeps both [llama3](https://llama.meta.com/llama3/) (a 70B parameter model) and [phi-3](https://azure.microsoft.com/en-us/blog/introducing-phi-3-redefining-whats-possible-with-slms/) (a 3.8B parameter model) loaded. A completion request towards `llama3` will use many more resources than asking `phi-3` for a completion. 8 slots of `llama3` require many more resources than 8 slots of `phi-3`.
36 |
37 | How can that be balanced effectively? As a software architect, you would have to plan an infrastructure that does not allow developers to randomly load models into memory and force a specific number of slots, which defeats the purpose of `Ollama`.
38 |
39 | ### Good Parts of Ollama
40 |
41 | I greatly support `Ollama` because it makes it easy to start your journey with large language models. You can use `Ollama` in production deployments, but `llama.cpp` is a better choice because it is predictable.
42 |
43 | `Ollama` is better suited than `llama.cpp` for end-user distributable applications. By that, I mean the applications that do not use an external server but are installed and run in their entirety on the user's device. The same thing that makes it less predictable regarding resource usage makes it more resilient to end-user errors. In that context, resource usage predictability is less important than on the server side.
44 |
45 | That is why this handbook is almost entirely based on [vanilla](https://en.wikipedia.org/wiki/Vanilla_software) `llama.cpp` as it is much better for server-side deployments (based on all the reasons above).
46 |
47 | The situation might change with the future `Ollama` releases.
48 |
--------------------------------------------------------------------------------
/src/deployments/llama.cpp/aws-ec2-cuda/README.md:
--------------------------------------------------------------------------------
1 | # Installing on AWS EC2 with CUDA
2 |
3 | This tutorial was tested on a `g4dn.xlarge` instance running the `Ubuntu 22.04` operating
4 | system, and it was written explicitly for that setup.
5 |
6 | ## Installation Steps
7 |
8 | 1. Start an EC2 instance of any class with a GPU with CUDA support.
9 |
10 | If you want to compile llama.cpp on this instance, you will need at least 4GB for CUDA drivers and enough space for your LLM of choice. I recommend at least 30GB. Perform the following steps of this tutorial on the instance you started.
11 |
12 | 2. Install build dependencies:
13 | ```shell
14 | sudo apt update
15 | ```
16 | ```shell
17 | sudo apt install build-essential ccache
18 | ```
19 |
20 | 3. Install CUDA Toolkit (only the Base Installer). Download it and follow the instructions from <https://developer.nvidia.com/cuda-downloads>
21 |
22 |
23 | At the time of writing this tutorial, the highest Ubuntu version supported by the CUDA Toolkit was 22.04. But do not fear! :) We'll get it to work with some minor workarounds (see the [Potential Errors](#potential-errors) section)
24 |
25 | 4. Install NVIDIA Drivers:
26 | ```shell
27 | sudo apt install nvidia-driver-555
28 | ```
29 |
30 | 5. Compile llama.cpp:
31 | ```shell
32 | git clone https://github.com/ggerganov/llama.cpp.git
33 | ```
34 | ```shell
35 | cd llama.cpp
36 | ```
37 | ```shell
38 | GGML_CUDA=1 make -j
39 | ```
40 | 6. Benchmark llama.cpp (optional):
41 |
42 | Follow the official tutorial if you intend to run the benchmark. However, keep using `GGML_CUDA=1 make` to compile llama.cpp (do *not* use `LLAMA_CUBLAS=1`):
43 | https://github.com/ggerganov/llama.cpp/discussions/4225
44 |
45 | Instead of performing a model quantization yourself, you can download quantized models from Hugging Face. For example, you can download `Mistral Instruct` from https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/tree/main
46 |
47 | ## Potential Errors
48 |
49 | ### CUDA Architecture Must Be Explicitly Provided
50 |
51 | ```
52 | ERROR: For CUDA versions < 11.7 a target CUDA architecture must be explicitly
53 | provided via environment variable CUDA_DOCKER_ARCH, e.g. by running
54 | "export CUDA_DOCKER_ARCH=compute_XX" on Unix-like systems, where XX is the
55 | minimum compute capability that the code needs to run on. A list with compute
56 | capabilities can be found here: https://developer.nvidia.com/cuda-gpus
57 | ```
58 |
59 | You need to check the mentioned page (https://developer.nvidia.com/cuda-gpus)
60 | and pick the appropriate version for your instance's GPU. `g4dn` instances
61 | use T4 GPU, which would be `compute_75`.
62 |
63 | For example:
64 |
65 | ```shell
66 | CUDA_DOCKER_ARCH=compute_75 GGML_CUDA=1 make -j
67 | ```
68 |
69 | ### Failed to initialize CUDA
70 |
71 | ```
72 | ggml_cuda_init: failed to initialize CUDA: unknown error
73 | ```
74 |
75 | This can sometimes be solved with `sudo modprobe nvidia_uvm`.
76 |
77 | You can also create a Systemd unit that loads the module on boot:
78 |
79 | ```ini
80 | [Unit]
81 | After=nvidia-persistenced.service
82 |
83 | [Service]
84 | Type=oneshot
85 | ExecStart=/usr/sbin/modprobe nvidia_uvm
86 |
87 | [Install]
88 | WantedBy=multi-user.target
89 | ```
90 |
91 | ### NVCC not found
92 |
93 | ```
94 | /bin/sh: 1: nvcc: not found
95 | ```
96 |
97 | You need to add the CUDA path to your shell environment variables.
98 |
99 | For example, with Bash and CUDA 12:
100 |
101 | ```shell
102 | export PATH="/usr/local/cuda-12/bin:$PATH"
103 | ```
104 | ```shell
105 | export LD_LIBRARY_PATH="/usr/local/cuda-12/lib64:$LD_LIBRARY_PATH"
106 | ```
107 |
108 | ### cannot find -lcuda
109 |
110 | ```
111 | /usr/bin/ld: cannot find -lcuda: No such file or directory
112 | ```
113 |
114 | That means your Nvidia drivers are not installed. Install NVIDIA Drivers first.
115 |
116 | ### Cannot communicate with NVIDIA driver
117 |
118 | ```
119 | NVIDIA-SMI has failed because it couldn't communicate with the NVIDIA driver. Make sure that the latest NVIDIA driver is installed and running.
120 | ```
121 |
122 | If you installed the drivers, reboot the instance.
123 |
124 | ### Failed to decode the batch
125 |
126 | ```
127 | failed to decode the batch, n_batch = 0, ret = -1
128 | main: llama_decode() failed
129 | ```
130 |
131 | There are two potential causes of this issue.
132 |
133 | #### Option 1: Install NVIDIA drivers
134 |
135 | Make sure you have installed the CUDA Toolkit and NVIDIA drivers. If you do, restart your server and try again. Most likely, NVIDIA kernel modules are not loaded.
136 |
137 | ```shell
138 | sudo reboot
139 | ```
140 |
141 | #### Option 2: Use different benchmarking parameters
142 |
143 | For example, with `Mistral Instruct 7B` what worked for me is:
144 |
145 | ```shell
146 | ./llama-batched-bench -m ../mistral-7b-instruct-v0.2.Q4_K_M.gguf 2048 2048 512 0 999 128,256,512 128,256 1,2,4,8,16,32
147 | ```
148 |
--------------------------------------------------------------------------------
/src/deployments/llama.cpp/aws-image-builder/README.md:
--------------------------------------------------------------------------------
1 | # Installing with AWS Image Builder
2 |
3 | This tutorial explains how to install [llama.cpp](https://github.com/ggerganov/llama.cpp) with [AWS EC2 Image Builder](https://aws.amazon.com/image-builder/).
4 |
5 | By putting [llama.cpp](https://github.com/ggerganov/llama.cpp) in EC2 Image Builder pipeline, you can automatically build custom AMIs with [llama.cpp](https://github.com/ggerganov/llama.cpp) pre-installed.
6 |
7 | You can also use that AMI as a base and add your foundational model on top of it. Thanks to that, you can quickly scale up or down your [llama.cpp](https://github.com/ggerganov/llama.cpp) groups.
8 |
9 | We will repackage [the base EC2 tutorial](/deployments/llama.cpp/aws-ec2-cuda) as a set of Image Builder Components and Workflow.
10 |
11 | You can complete the tutorial steps either manually or by automating the setup with [Terraform](https://www.terraform.io/)/[OpenTofu](https://opentofu.org/). Terraform source files are linked to their respective tutorial steps.
12 |
13 | ## Installation Steps
14 |
15 | 1. Create an IAM `imagebuilder` role ([source file](terraform/aws/aws_iam_role_imagebuilder_role.tf))
16 |
17 | Go to the IAM Dashboard, click "Roles" from the left-hand menu, and select "AWS service" as the trusted entity type. Next, select "EC2" as the use case:
18 |
19 | 
20 |
21 | Next, assign the following policies:
22 |
23 | - `arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceforEC2Role`
24 | - `arn:aws:iam::aws:policy/EC2InstanceProfileForImageBuilderECRContainerBuilds`
25 | - `arn:aws:iam::aws:policy/EC2InstanceProfileForImageBuilder`
26 | - `arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore`
27 |
28 | Name your role (for example, "imagebuilder") and finish creating it. You should end up with permissions and trust relationships looking like this:
29 |
30 | 
31 | 
32 |
33 | 2. Create components.
34 |
35 | We'll need the following four components:
36 | * llama.cpp build dependencies. It needs to install `build-essential` and `ccache` ([source file](terraform/aws/aws_imagebuilder_component_apt_build_essential.tf))
37 | * CUDA toolkit ([source file](terraform/aws/aws_imagebuilder_component_cuda_toolkit_12.tf))
38 | * NVIDIA driver ([source file](terraform/aws/aws_imagebuilder_component_apt_nvidia_driver_555.tf))
39 | * llama.cpp itself ([source file](terraform/aws/aws_imagebuilder_component_llamacpp_gpu_compute_75.tf))
40 |
41 | To create the components via the GUI, navigate to the EC2 Image Builder service on AWS and select "Components" from the menu. We'll need to add four components that will act as the building blocks of our Image Builder pipeline. You can refer to [the generic EC2 tutorial](tutorial-installing-llamacpp-aws-cuda.md) for more details.
42 |
43 | Click "Create component". Next, for each component:
44 |
45 | - Choose "Build" as the component type
46 | - Select "Linux" as the image OS
47 | - Select "Ubuntu 22.04" as the compatible OS version
48 |
49 | Provide the following as component names and contents in YAML format:
50 |
51 | **Component name: apt_build_essential**
52 | ```yaml
53 | name: apt_build_essential
54 | description: "Component to install build essentials on Ubuntu"
55 | schemaVersion: '1.0'
56 | phases:
57 | - name: build
58 | steps:
59 | - name: InstallBuildEssential
60 | action: ExecuteBash
61 | inputs:
62 | commands:
63 | - sudo apt-get update
64 | - DEBIAN_FRONTEND=noninteractive sudo apt-get install -yq build-essential ccache
65 | onFailure: Abort
66 | timeoutSeconds: 180
67 | ```
68 |
69 |
70 | **Component name: apt_nvidia_driver_555**
71 | ```yaml
72 | name: apt_nvidia_driver_555
73 | description: "Component to install NVIDIA driver 550 on Ubuntu"
74 | schemaVersion: '1.0'
75 | phases:
76 | - name: build
77 | steps:
78 | - name: apt_nvidia_driver_555
79 | action: ExecuteBash
80 | inputs:
81 | commands:
82 | - sudo apt-get update
83 | - DEBIAN_FRONTEND=noninteractive sudo apt-get install -yq nvidia-driver-550
84 | onFailure: Abort
85 | timeoutSeconds: 180
86 | - name: reboot
87 | action: Reboot
88 | ```
89 |
90 |
91 | **Component name: cuda_toolkit_12**
92 | ```yaml
93 | name: cuda_toolkit_12
94 | description: "Component to install CUDA Toolkit 12 on Ubuntu"
95 | schemaVersion: '1.0'
96 | phases:
97 | - name: build
98 | steps:
99 | - name: apt_cuda_toolkit_12
100 | action: ExecuteBash
101 | inputs:
102 | commands:
103 | - DEBIAN_FRONTEND=noninteractive sudo apt-get -yq install nvidia-cuda-toolkit
104 | onFailure: Abort
105 | timeoutSeconds: 600
106 | - name: reboot
107 | action: Reboot
108 | ```
109 |
110 |
111 | **Component name: llamacpp_gpu_compute_75**
112 | ```yaml
113 | name: llamacpp_gpu_compute_75
114 | description: "Component to install and compile llama.cpp with CUDA compute capability 75 on Ubuntu"
115 | schemaVersion: '1.0'
116 | phases:
117 | - name: build
118 | steps:
119 | - name: compile
120 | action: ExecuteBash
121 | inputs:
122 | commands:
123 | - cd /opt
124 | - git clone https://github.com/ggerganov/llama.cpp.git
125 | - cd llama.cpp
126 | - |
127 | CUDA_DOCKER_ARCH=compute_75 \
128 | LD_LIBRARY_PATH="/usr/local/cuda-12/lib64:$LD_LIBRARY_PATH" \
129 | GGML_CUDA=1 \
130 | PATH="/usr/local/cuda-12/bin:$PATH" \
131 | make -j
132 | onFailure: Abort
133 | timeoutSeconds: 1200
134 | ```
135 |
136 | Once you're finished, you'll see all the components you created on the list:
137 |
138 | 
139 |
140 | 3. Add Infrastructure Configuration ([source file](terraform/aws/aws_imagebuilder_infrastructure_configuration_llamacpp_gpu_compute_75.tf))
141 |
142 | Next, we'll create a new Infrastructure Configuration. Select it from the left-hand menu and click "Create". You'll need to use the `g4dn.xlarge` instance type or any other instance type that supports CUDA. Name your configuration, select the IAM role you created in step 1, and select the instance type, for example:
143 |
144 | 
145 |
146 | 4. Add Distribution Configuration ([source file](terraform/aws/aws_imagebuilder_distribution_configuration_compute_75.tf))
147 |
148 | Select "Distribution settings" in the left-hand menu to create a Distribution Configuration. It specifies how the resulting image should be distributed (what kind of output image will be published). Select Amazon Machine Image as the output type, name the configuration, and save:
149 |
150 | 
151 |
152 | 5. Add Image Pipeline ([source file](terraform/aws/aws_imagebuilder_image_pipeline_llamacpp_gpu_compute_75.tf))
153 |
154 | Next, we'll add the Image Pipeline. It will use the Components, Infrastructure Configuration, and Distribution Configuration we prepared previously. Select "Image Pipelines" from the left-hand menu and click "Create". Name your image pipeline and select the desired build schedule.
155 |
156 | As the second step, create a new recipe. Choose AMI as the output type and name the recipe:
157 |
158 | 
159 |
160 | Next, select the previously created components:
161 |
162 | 
163 |
164 | 6. The next step is to build the image. You should be able to run the pipeline:
165 |
166 | 
167 |
168 | 7. Launch a test EC2 instance.
169 |
170 | When launching an EC2 instance, the llama.cpp image we prepared should be available in the `My AMIs` list (once the instance is up, you can verify the setup with the sketch below):
171 |
172 | 
173 |
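    | A quick way to verify the resulting AMI (a minimal sketch; the llama.cpp checkout lives in `/opt/llama.cpp` as set up by the `llamacpp_gpu_compute_75` component, and binary names vary between llama.cpp versions):
    |
    | ```shell
    | # the driver and GPU should be visible from inside the instance
    | nvidia-smi
    |
    | # the compiled llama.cpp binaries should be present in the checkout,
    | # e.g. llama-cli and llama-server in recent versions
    | ls /opt/llama.cpp
    | ```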
174 |
175 | ## Summary
176 |
177 | Feel free to open an issue if you find a bug in the tutorial or have ideas on how to improve it.
178 |
--------------------------------------------------------------------------------
/mdbook-admonish.css:
--------------------------------------------------------------------------------
1 | @charset "UTF-8";
2 | :is(.admonition) {
3 | display: flow-root;
4 | margin: 1.5625em 0;
5 | padding: 0 1.2rem;
6 | color: var(--fg);
7 | page-break-inside: avoid;
8 | background-color: var(--bg);
9 | border: 0 solid black;
10 | border-inline-start-width: 0.4rem;
11 | border-radius: 0.2rem;
12 | box-shadow: 0 0.2rem 1rem rgba(0, 0, 0, 0.05), 0 0 0.1rem rgba(0, 0, 0, 0.1);
13 | }
14 | @media print {
15 | :is(.admonition) {
16 | box-shadow: none;
17 | }
18 | }
19 | :is(.admonition) > * {
20 | box-sizing: border-box;
21 | }
22 | :is(.admonition) :is(.admonition) {
23 | margin-top: 1em;
24 | margin-bottom: 1em;
25 | }
26 | :is(.admonition) > .tabbed-set:only-child {
27 | margin-top: 0;
28 | }
29 | html :is(.admonition) > :last-child {
30 | margin-bottom: 1.2rem;
31 | }
32 |
33 | a.admonition-anchor-link {
34 | display: none;
35 | position: absolute;
36 | left: -1.2rem;
37 | padding-right: 1rem;
38 | }
39 | a.admonition-anchor-link:link, a.admonition-anchor-link:visited {
40 | color: var(--fg);
41 | }
42 | a.admonition-anchor-link:link:hover, a.admonition-anchor-link:visited:hover {
43 | text-decoration: none;
44 | }
45 | a.admonition-anchor-link::before {
46 | content: "§";
47 | }
48 |
49 | :is(.admonition-title, summary.admonition-title) {
50 | position: relative;
51 | min-height: 4rem;
52 | margin-block: 0;
53 | margin-inline: -1.6rem -1.2rem;
54 | padding-block: 0.8rem;
55 | padding-inline: 4.4rem 1.2rem;
56 | font-weight: 700;
57 | background-color: rgba(68, 138, 255, 0.1);
58 | print-color-adjust: exact;
59 | -webkit-print-color-adjust: exact;
60 | display: flex;
61 | }
62 | :is(.admonition-title, summary.admonition-title) p {
63 | margin: 0;
64 | }
65 | html :is(.admonition-title, summary.admonition-title):last-child {
66 | margin-bottom: 0;
67 | }
68 | :is(.admonition-title, summary.admonition-title)::before {
69 | position: absolute;
70 | top: 0.625em;
71 | inset-inline-start: 1.6rem;
72 | width: 2rem;
73 | height: 2rem;
74 | background-color: #448aff;
75 | print-color-adjust: exact;
76 | -webkit-print-color-adjust: exact;
77 | mask-image: url('data:image/svg+xml;charset=utf-8,');
78 | -webkit-mask-image: url('data:image/svg+xml;charset=utf-8,');
79 | mask-repeat: no-repeat;
80 | -webkit-mask-repeat: no-repeat;
81 | mask-size: contain;
82 | -webkit-mask-size: contain;
83 | content: "";
84 | }
85 | :is(.admonition-title, summary.admonition-title):hover a.admonition-anchor-link {
86 | display: initial;
87 | }
88 |
89 | details.admonition > summary.admonition-title::after {
90 | position: absolute;
91 | top: 0.625em;
92 | inset-inline-end: 1.6rem;
93 | height: 2rem;
94 | width: 2rem;
95 | background-color: currentcolor;
96 | mask-image: var(--md-details-icon);
97 | -webkit-mask-image: var(--md-details-icon);
98 | mask-repeat: no-repeat;
99 | -webkit-mask-repeat: no-repeat;
100 | mask-size: contain;
101 | -webkit-mask-size: contain;
102 | content: "";
103 | transform: rotate(0deg);
104 | transition: transform 0.25s;
105 | }
106 | details[open].admonition > summary.admonition-title::after {
107 | transform: rotate(90deg);
108 | }
109 |
110 | :root {
111 | --md-details-icon: url("data:image/svg+xml;charset=utf-8,");
112 | }
113 |
114 | :root {
115 | --md-admonition-icon--admonish-note: url("data:image/svg+xml;charset=utf-8,");
116 | --md-admonition-icon--admonish-abstract: url("data:image/svg+xml;charset=utf-8,");
117 | --md-admonition-icon--admonish-info: url("data:image/svg+xml;charset=utf-8,");
118 | --md-admonition-icon--admonish-tip: url("data:image/svg+xml;charset=utf-8,");
119 | --md-admonition-icon--admonish-success: url("data:image/svg+xml;charset=utf-8,");
120 | --md-admonition-icon--admonish-question: url("data:image/svg+xml;charset=utf-8,");
121 | --md-admonition-icon--admonish-warning: url("data:image/svg+xml;charset=utf-8,");
122 | --md-admonition-icon--admonish-failure: url("data:image/svg+xml;charset=utf-8,");
123 | --md-admonition-icon--admonish-danger: url("data:image/svg+xml;charset=utf-8,");
124 | --md-admonition-icon--admonish-bug: url("data:image/svg+xml;charset=utf-8,");
125 | --md-admonition-icon--admonish-example: url("data:image/svg+xml;charset=utf-8,");
126 | --md-admonition-icon--admonish-quote: url("data:image/svg+xml;charset=utf-8,");
127 | }
128 |
129 | :is(.admonition):is(.admonish-note) {
130 | border-color: #448aff;
131 | }
132 |
133 | :is(.admonish-note) > :is(.admonition-title, summary.admonition-title) {
134 | background-color: rgba(68, 138, 255, 0.1);
135 | }
136 | :is(.admonish-note) > :is(.admonition-title, summary.admonition-title)::before {
137 | background-color: #448aff;
138 | mask-image: var(--md-admonition-icon--admonish-note);
139 | -webkit-mask-image: var(--md-admonition-icon--admonish-note);
140 | mask-repeat: no-repeat;
141 | -webkit-mask-repeat: no-repeat;
142 | mask-size: contain;
143 | -webkit-mask-repeat: no-repeat;
144 | }
145 |
146 | :is(.admonition):is(.admonish-abstract, .admonish-summary, .admonish-tldr) {
147 | border-color: #00b0ff;
148 | }
149 |
150 | :is(.admonish-abstract, .admonish-summary, .admonish-tldr) > :is(.admonition-title, summary.admonition-title) {
151 | background-color: rgba(0, 176, 255, 0.1);
152 | }
153 | :is(.admonish-abstract, .admonish-summary, .admonish-tldr) > :is(.admonition-title, summary.admonition-title)::before {
154 | background-color: #00b0ff;
155 | mask-image: var(--md-admonition-icon--admonish-abstract);
156 | -webkit-mask-image: var(--md-admonition-icon--admonish-abstract);
157 | mask-repeat: no-repeat;
158 | -webkit-mask-repeat: no-repeat;
159 | mask-size: contain;
160 | -webkit-mask-repeat: no-repeat;
161 | }
162 |
163 | :is(.admonition):is(.admonish-info, .admonish-todo) {
164 | border-color: #00b8d4;
165 | }
166 |
167 | :is(.admonish-info, .admonish-todo) > :is(.admonition-title, summary.admonition-title) {
168 | background-color: rgba(0, 184, 212, 0.1);
169 | }
170 | :is(.admonish-info, .admonish-todo) > :is(.admonition-title, summary.admonition-title)::before {
171 | background-color: #00b8d4;
172 | mask-image: var(--md-admonition-icon--admonish-info);
173 | -webkit-mask-image: var(--md-admonition-icon--admonish-info);
174 | mask-repeat: no-repeat;
175 | -webkit-mask-repeat: no-repeat;
176 | mask-size: contain;
177 | -webkit-mask-repeat: no-repeat;
178 | }
179 |
180 | :is(.admonition):is(.admonish-tip, .admonish-hint, .admonish-important) {
181 | border-color: #00bfa5;
182 | }
183 |
184 | :is(.admonish-tip, .admonish-hint, .admonish-important) > :is(.admonition-title, summary.admonition-title) {
185 | background-color: rgba(0, 191, 165, 0.1);
186 | }
187 | :is(.admonish-tip, .admonish-hint, .admonish-important) > :is(.admonition-title, summary.admonition-title)::before {
188 | background-color: #00bfa5;
189 | mask-image: var(--md-admonition-icon--admonish-tip);
190 | -webkit-mask-image: var(--md-admonition-icon--admonish-tip);
191 | mask-repeat: no-repeat;
192 | -webkit-mask-repeat: no-repeat;
193 | mask-size: contain;
194 | -webkit-mask-repeat: no-repeat;
195 | }
196 |
197 | :is(.admonition):is(.admonish-success, .admonish-check, .admonish-done) {
198 | border-color: #00c853;
199 | }
200 |
201 | :is(.admonish-success, .admonish-check, .admonish-done) > :is(.admonition-title, summary.admonition-title) {
202 | background-color: rgba(0, 200, 83, 0.1);
203 | }
204 | :is(.admonish-success, .admonish-check, .admonish-done) > :is(.admonition-title, summary.admonition-title)::before {
205 | background-color: #00c853;
206 | mask-image: var(--md-admonition-icon--admonish-success);
207 | -webkit-mask-image: var(--md-admonition-icon--admonish-success);
208 | mask-repeat: no-repeat;
209 | -webkit-mask-repeat: no-repeat;
210 | mask-size: contain;
211 | -webkit-mask-repeat: no-repeat;
212 | }
213 |
214 | :is(.admonition):is(.admonish-question, .admonish-help, .admonish-faq) {
215 | border-color: #64dd17;
216 | }
217 |
218 | :is(.admonish-question, .admonish-help, .admonish-faq) > :is(.admonition-title, summary.admonition-title) {
219 | background-color: rgba(100, 221, 23, 0.1);
220 | }
221 | :is(.admonish-question, .admonish-help, .admonish-faq) > :is(.admonition-title, summary.admonition-title)::before {
222 | background-color: #64dd17;
223 | mask-image: var(--md-admonition-icon--admonish-question);
224 | -webkit-mask-image: var(--md-admonition-icon--admonish-question);
225 | mask-repeat: no-repeat;
226 | -webkit-mask-repeat: no-repeat;
227 | mask-size: contain;
228 | -webkit-mask-repeat: no-repeat;
229 | }
230 |
231 | :is(.admonition):is(.admonish-warning, .admonish-caution, .admonish-attention) {
232 | border-color: #ff9100;
233 | }
234 |
235 | :is(.admonish-warning, .admonish-caution, .admonish-attention) > :is(.admonition-title, summary.admonition-title) {
236 | background-color: rgba(255, 145, 0, 0.1);
237 | }
238 | :is(.admonish-warning, .admonish-caution, .admonish-attention) > :is(.admonition-title, summary.admonition-title)::before {
239 | background-color: #ff9100;
240 | mask-image: var(--md-admonition-icon--admonish-warning);
241 | -webkit-mask-image: var(--md-admonition-icon--admonish-warning);
242 | mask-repeat: no-repeat;
243 | -webkit-mask-repeat: no-repeat;
244 | mask-size: contain;
245 | -webkit-mask-repeat: no-repeat;
246 | }
247 |
248 | :is(.admonition):is(.admonish-failure, .admonish-fail, .admonish-missing) {
249 | border-color: #ff5252;
250 | }
251 |
252 | :is(.admonish-failure, .admonish-fail, .admonish-missing) > :is(.admonition-title, summary.admonition-title) {
253 | background-color: rgba(255, 82, 82, 0.1);
254 | }
255 | :is(.admonish-failure, .admonish-fail, .admonish-missing) > :is(.admonition-title, summary.admonition-title)::before {
256 | background-color: #ff5252;
257 | mask-image: var(--md-admonition-icon--admonish-failure);
258 | -webkit-mask-image: var(--md-admonition-icon--admonish-failure);
259 | mask-repeat: no-repeat;
260 | -webkit-mask-repeat: no-repeat;
261 | mask-size: contain;
262 | -webkit-mask-repeat: no-repeat;
263 | }
264 |
265 | :is(.admonition):is(.admonish-danger, .admonish-error) {
266 | border-color: #ff1744;
267 | }
268 |
269 | :is(.admonish-danger, .admonish-error) > :is(.admonition-title, summary.admonition-title) {
270 | background-color: rgba(255, 23, 68, 0.1);
271 | }
272 | :is(.admonish-danger, .admonish-error) > :is(.admonition-title, summary.admonition-title)::before {
273 | background-color: #ff1744;
274 | mask-image: var(--md-admonition-icon--admonish-danger);
275 | -webkit-mask-image: var(--md-admonition-icon--admonish-danger);
276 | mask-repeat: no-repeat;
277 | -webkit-mask-repeat: no-repeat;
278 | mask-size: contain;
279 | -webkit-mask-repeat: no-repeat;
280 | }
281 |
282 | :is(.admonition):is(.admonish-bug) {
283 | border-color: #f50057;
284 | }
285 |
286 | :is(.admonish-bug) > :is(.admonition-title, summary.admonition-title) {
287 | background-color: rgba(245, 0, 87, 0.1);
288 | }
289 | :is(.admonish-bug) > :is(.admonition-title, summary.admonition-title)::before {
290 | background-color: #f50057;
291 | mask-image: var(--md-admonition-icon--admonish-bug);
292 | -webkit-mask-image: var(--md-admonition-icon--admonish-bug);
293 | mask-repeat: no-repeat;
294 | -webkit-mask-repeat: no-repeat;
295 | mask-size: contain;
296 | -webkit-mask-repeat: no-repeat;
297 | }
298 |
299 | :is(.admonition):is(.admonish-example) {
300 | border-color: #7c4dff;
301 | }
302 |
303 | :is(.admonish-example) > :is(.admonition-title, summary.admonition-title) {
304 | background-color: rgba(124, 77, 255, 0.1);
305 | }
306 | :is(.admonish-example) > :is(.admonition-title, summary.admonition-title)::before {
307 | background-color: #7c4dff;
308 | mask-image: var(--md-admonition-icon--admonish-example);
309 | -webkit-mask-image: var(--md-admonition-icon--admonish-example);
310 | mask-repeat: no-repeat;
311 | -webkit-mask-repeat: no-repeat;
312 | mask-size: contain;
313 | -webkit-mask-repeat: no-repeat;
314 | }
315 |
316 | :is(.admonition):is(.admonish-quote, .admonish-cite) {
317 | border-color: #9e9e9e;
318 | }
319 |
320 | :is(.admonish-quote, .admonish-cite) > :is(.admonition-title, summary.admonition-title) {
321 | background-color: rgba(158, 158, 158, 0.1);
322 | }
323 | :is(.admonish-quote, .admonish-cite) > :is(.admonition-title, summary.admonition-title)::before {
324 | background-color: #9e9e9e;
325 | mask-image: var(--md-admonition-icon--admonish-quote);
326 | -webkit-mask-image: var(--md-admonition-icon--admonish-quote);
327 | mask-repeat: no-repeat;
328 | -webkit-mask-repeat: no-repeat;
329 | mask-size: contain;
330 | -webkit-mask-repeat: no-repeat;
331 | }
332 |
333 | .navy :is(.admonition) {
334 | background-color: var(--sidebar-bg);
335 | }
336 |
337 | .ayu :is(.admonition),
338 | .coal :is(.admonition) {
339 | background-color: var(--theme-hover);
340 | }
341 |
342 | .rust :is(.admonition) {
343 | background-color: var(--sidebar-bg);
344 | color: var(--sidebar-fg);
345 | }
346 | .rust .admonition-anchor-link:link, .rust .admonition-anchor-link:visited {
347 | color: var(--sidebar-fg);
348 | }
349 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Attribution-ShareAlike 4.0 International
2 |
3 | =======================================================================
4 |
5 | Creative Commons Corporation ("Creative Commons") is not a law firm and
6 | does not provide legal services or legal advice. Distribution of
7 | Creative Commons public licenses does not create a lawyer-client or
8 | other relationship. Creative Commons makes its licenses and related
9 | information available on an "as-is" basis. Creative Commons gives no
10 | warranties regarding its licenses, any material licensed under their
11 | terms and conditions, or any related information. Creative Commons
12 | disclaims all liability for damages resulting from their use to the
13 | fullest extent possible.
14 |
15 | Using Creative Commons Public Licenses
16 |
17 | Creative Commons public licenses provide a standard set of terms and
18 | conditions that creators and other rights holders may use to share
19 | original works of authorship and other material subject to copyright
20 | and certain other rights specified in the public license below. The
21 | following considerations are for informational purposes only, are not
22 | exhaustive, and do not form part of our licenses.
23 |
24 | Considerations for licensors: Our public licenses are
25 | intended for use by those authorized to give the public
26 | permission to use material in ways otherwise restricted by
27 | copyright and certain other rights. Our licenses are
28 | irrevocable. Licensors should read and understand the terms
29 | and conditions of the license they choose before applying it.
30 | Licensors should also secure all rights necessary before
31 | applying our licenses so that the public can reuse the
32 | material as expected. Licensors should clearly mark any
33 | material not subject to the license. This includes other CC-
34 | licensed material, or material used under an exception or
35 | limitation to copyright. More considerations for licensors:
36 | wiki.creativecommons.org/Considerations_for_licensors
37 |
38 | Considerations for the public: By using one of our public
39 | licenses, a licensor grants the public permission to use the
40 | licensed material under specified terms and conditions. If
41 | the licensor's permission is not necessary for any reason--for
42 | example, because of any applicable exception or limitation to
43 | copyright--then that use is not regulated by the license. Our
44 | licenses grant only permissions under copyright and certain
45 | other rights that a licensor has authority to grant. Use of
46 | the licensed material may still be restricted for other
47 | reasons, including because others have copyright or other
48 | rights in the material. A licensor may make special requests,
49 | such as asking that all changes be marked or described.
50 | Although not required by our licenses, you are encouraged to
51 | respect those requests where reasonable. More considerations
52 | for the public:
53 | wiki.creativecommons.org/Considerations_for_licensees
54 |
55 | =======================================================================
56 |
57 | Creative Commons Attribution-ShareAlike 4.0 International Public
58 | License
59 |
60 | By exercising the Licensed Rights (defined below), You accept and agree
61 | to be bound by the terms and conditions of this Creative Commons
62 | Attribution-ShareAlike 4.0 International Public License ("Public
63 | License"). To the extent this Public License may be interpreted as a
64 | contract, You are granted the Licensed Rights in consideration of Your
65 | acceptance of these terms and conditions, and the Licensor grants You
66 | such rights in consideration of benefits the Licensor receives from
67 | making the Licensed Material available under these terms and
68 | conditions.
69 |
70 |
71 | Section 1 -- Definitions.
72 |
73 | a. Adapted Material means material subject to Copyright and Similar
74 | Rights that is derived from or based upon the Licensed Material
75 | and in which the Licensed Material is translated, altered,
76 | arranged, transformed, or otherwise modified in a manner requiring
77 | permission under the Copyright and Similar Rights held by the
78 | Licensor. For purposes of this Public License, where the Licensed
79 | Material is a musical work, performance, or sound recording,
80 | Adapted Material is always produced where the Licensed Material is
81 | synched in timed relation with a moving image.
82 |
83 | b. Adapter's License means the license You apply to Your Copyright
84 | and Similar Rights in Your contributions to Adapted Material in
85 | accordance with the terms and conditions of this Public License.
86 |
87 | c. BY-SA Compatible License means a license listed at
88 | creativecommons.org/compatiblelicenses, approved by Creative
89 | Commons as essentially the equivalent of this Public License.
90 |
91 | d. Copyright and Similar Rights means copyright and/or similar rights
92 | closely related to copyright including, without limitation,
93 | performance, broadcast, sound recording, and Sui Generis Database
94 | Rights, without regard to how the rights are labeled or
95 | categorized. For purposes of this Public License, the rights
96 | specified in Section 2(b)(1)-(2) are not Copyright and Similar
97 | Rights.
98 |
99 | e. Effective Technological Measures means those measures that, in the
100 | absence of proper authority, may not be circumvented under laws
101 | fulfilling obligations under Article 11 of the WIPO Copyright
102 | Treaty adopted on December 20, 1996, and/or similar international
103 | agreements.
104 |
105 | f. Exceptions and Limitations means fair use, fair dealing, and/or
106 | any other exception or limitation to Copyright and Similar Rights
107 | that applies to Your use of the Licensed Material.
108 |
109 | g. License Elements means the license attributes listed in the name
110 | of a Creative Commons Public License. The License Elements of this
111 | Public License are Attribution and ShareAlike.
112 |
113 | h. Licensed Material means the artistic or literary work, database,
114 | or other material to which the Licensor applied this Public
115 | License.
116 |
117 | i. Licensed Rights means the rights granted to You subject to the
118 | terms and conditions of this Public License, which are limited to
119 | all Copyright and Similar Rights that apply to Your use of the
120 | Licensed Material and that the Licensor has authority to license.
121 |
122 | j. Licensor means the individual(s) or entity(ies) granting rights
123 | under this Public License.
124 |
125 | k. Share means to provide material to the public by any means or
126 | process that requires permission under the Licensed Rights, such
127 | as reproduction, public display, public performance, distribution,
128 | dissemination, communication, or importation, and to make material
129 | available to the public including in ways that members of the
130 | public may access the material from a place and at a time
131 | individually chosen by them.
132 |
133 | l. Sui Generis Database Rights means rights other than copyright
134 | resulting from Directive 96/9/EC of the European Parliament and of
135 | the Council of 11 March 1996 on the legal protection of databases,
136 | as amended and/or succeeded, as well as other essentially
137 | equivalent rights anywhere in the world.
138 |
139 | m. You means the individual or entity exercising the Licensed Rights
140 | under this Public License. Your has a corresponding meaning.
141 |
142 |
143 | Section 2 -- Scope.
144 |
145 | a. License grant.
146 |
147 | 1. Subject to the terms and conditions of this Public License,
148 | the Licensor hereby grants You a worldwide, royalty-free,
149 | non-sublicensable, non-exclusive, irrevocable license to
150 | exercise the Licensed Rights in the Licensed Material to:
151 |
152 | a. reproduce and Share the Licensed Material, in whole or
153 | in part; and
154 |
155 | b. produce, reproduce, and Share Adapted Material.
156 |
157 | 2. Exceptions and Limitations. For the avoidance of doubt, where
158 | Exceptions and Limitations apply to Your use, this Public
159 | License does not apply, and You do not need to comply with
160 | its terms and conditions.
161 |
162 | 3. Term. The term of this Public License is specified in Section
163 | 6(a).
164 |
165 | 4. Media and formats; technical modifications allowed. The
166 | Licensor authorizes You to exercise the Licensed Rights in
167 | all media and formats whether now known or hereafter created,
168 | and to make technical modifications necessary to do so. The
169 | Licensor waives and/or agrees not to assert any right or
170 | authority to forbid You from making technical modifications
171 | necessary to exercise the Licensed Rights, including
172 | technical modifications necessary to circumvent Effective
173 | Technological Measures. For purposes of this Public License,
174 | simply making modifications authorized by this Section 2(a)
175 | (4) never produces Adapted Material.
176 |
177 | 5. Downstream recipients.
178 |
179 | a. Offer from the Licensor -- Licensed Material. Every
180 | recipient of the Licensed Material automatically
181 | receives an offer from the Licensor to exercise the
182 | Licensed Rights under the terms and conditions of this
183 | Public License.
184 |
185 | b. Additional offer from the Licensor -- Adapted Material.
186 | Every recipient of Adapted Material from You
187 | automatically receives an offer from the Licensor to
188 | exercise the Licensed Rights in the Adapted Material
189 | under the conditions of the Adapter's License You apply.
190 |
191 | c. No downstream restrictions. You may not offer or impose
192 | any additional or different terms or conditions on, or
193 | apply any Effective Technological Measures to, the
194 | Licensed Material if doing so restricts exercise of the
195 | Licensed Rights by any recipient of the Licensed
196 | Material.
197 |
198 | 6. No endorsement. Nothing in this Public License constitutes or
199 | may be construed as permission to assert or imply that You
200 | are, or that Your use of the Licensed Material is, connected
201 | with, or sponsored, endorsed, or granted official status by,
202 | the Licensor or others designated to receive attribution as
203 | provided in Section 3(a)(1)(A)(i).
204 |
205 | b. Other rights.
206 |
207 | 1. Moral rights, such as the right of integrity, are not
208 | licensed under this Public License, nor are publicity,
209 | privacy, and/or other similar personality rights; however, to
210 | the extent possible, the Licensor waives and/or agrees not to
211 | assert any such rights held by the Licensor to the limited
212 | extent necessary to allow You to exercise the Licensed
213 | Rights, but not otherwise.
214 |
215 | 2. Patent and trademark rights are not licensed under this
216 | Public License.
217 |
218 | 3. To the extent possible, the Licensor waives any right to
219 | collect royalties from You for the exercise of the Licensed
220 | Rights, whether directly or through a collecting society
221 | under any voluntary or waivable statutory or compulsory
222 | licensing scheme. In all other cases the Licensor expressly
223 | reserves any right to collect such royalties.
224 |
225 |
226 | Section 3 -- License Conditions.
227 |
228 | Your exercise of the Licensed Rights is expressly made subject to the
229 | following conditions.
230 |
231 | a. Attribution.
232 |
233 | 1. If You Share the Licensed Material (including in modified
234 | form), You must:
235 |
236 | a. retain the following if it is supplied by the Licensor
237 | with the Licensed Material:
238 |
239 | i. identification of the creator(s) of the Licensed
240 | Material and any others designated to receive
241 | attribution, in any reasonable manner requested by
242 | the Licensor (including by pseudonym if
243 | designated);
244 |
245 | ii. a copyright notice;
246 |
247 | iii. a notice that refers to this Public License;
248 |
249 | iv. a notice that refers to the disclaimer of
250 | warranties;
251 |
252 | v. a URI or hyperlink to the Licensed Material to the
253 | extent reasonably practicable;
254 |
255 | b. indicate if You modified the Licensed Material and
256 | retain an indication of any previous modifications; and
257 |
258 | c. indicate the Licensed Material is licensed under this
259 | Public License, and include the text of, or the URI or
260 | hyperlink to, this Public License.
261 |
262 | 2. You may satisfy the conditions in Section 3(a)(1) in any
263 | reasonable manner based on the medium, means, and context in
264 | which You Share the Licensed Material. For example, it may be
265 | reasonable to satisfy the conditions by providing a URI or
266 | hyperlink to a resource that includes the required
267 | information.
268 |
269 | 3. If requested by the Licensor, You must remove any of the
270 | information required by Section 3(a)(1)(A) to the extent
271 | reasonably practicable.
272 |
273 | b. ShareAlike.
274 |
275 | In addition to the conditions in Section 3(a), if You Share
276 | Adapted Material You produce, the following conditions also apply.
277 |
278 | 1. The Adapter's License You apply must be a Creative Commons
279 | license with the same License Elements, this version or
280 | later, or a BY-SA Compatible License.
281 |
282 | 2. You must include the text of, or the URI or hyperlink to, the
283 | Adapter's License You apply. You may satisfy this condition
284 | in any reasonable manner based on the medium, means, and
285 | context in which You Share Adapted Material.
286 |
287 | 3. You may not offer or impose any additional or different terms
288 | or conditions on, or apply any Effective Technological
289 | Measures to, Adapted Material that restrict exercise of the
290 | rights granted under the Adapter's License You apply.
291 |
292 |
293 | Section 4 -- Sui Generis Database Rights.
294 |
295 | Where the Licensed Rights include Sui Generis Database Rights that
296 | apply to Your use of the Licensed Material:
297 |
298 | a. for the avoidance of doubt, Section 2(a)(1) grants You the right
299 | to extract, reuse, reproduce, and Share all or a substantial
300 | portion of the contents of the database;
301 |
302 | b. if You include all or a substantial portion of the database
303 | contents in a database in which You have Sui Generis Database
304 | Rights, then the database in which You have Sui Generis Database
305 | Rights (but not its individual contents) is Adapted Material,
306 | including for purposes of Section 3(b); and
307 |
308 | c. You must comply with the conditions in Section 3(a) if You Share
309 | all or a substantial portion of the contents of the database.
310 |
311 | For the avoidance of doubt, this Section 4 supplements and does not
312 | replace Your obligations under this Public License where the Licensed
313 | Rights include other Copyright and Similar Rights.
314 |
315 |
316 | Section 5 -- Disclaimer of Warranties and Limitation of Liability.
317 |
318 | a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
319 | EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
320 | AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
321 | ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
322 | IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
323 | WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
324 | PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
325 | ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
326 | KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
327 | ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
328 |
329 | b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
330 | TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
331 | NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
332 | INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
333 | COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
334 | USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
335 | ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
336 | DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
337 | IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
338 |
339 | c. The disclaimer of warranties and limitation of liability provided
340 | above shall be interpreted in a manner that, to the extent
341 | possible, most closely approximates an absolute disclaimer and
342 | waiver of all liability.
343 |
344 |
345 | Section 6 -- Term and Termination.
346 |
347 | a. This Public License applies for the term of the Copyright and
348 | Similar Rights licensed here. However, if You fail to comply with
349 | this Public License, then Your rights under this Public License
350 | terminate automatically.
351 |
352 | b. Where Your right to use the Licensed Material has terminated under
353 | Section 6(a), it reinstates:
354 |
355 | 1. automatically as of the date the violation is cured, provided
356 | it is cured within 30 days of Your discovery of the
357 | violation; or
358 |
359 | 2. upon express reinstatement by the Licensor.
360 |
361 | For the avoidance of doubt, this Section 6(b) does not affect any
362 | right the Licensor may have to seek remedies for Your violations
363 | of this Public License.
364 |
365 | c. For the avoidance of doubt, the Licensor may also offer the
366 | Licensed Material under separate terms or conditions or stop
367 | distributing the Licensed Material at any time; however, doing so
368 | will not terminate this Public License.
369 |
370 | d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
371 | License.
372 |
373 |
374 | Section 7 -- Other Terms and Conditions.
375 |
376 | a. The Licensor shall not be bound by any additional or different
377 | terms or conditions communicated by You unless expressly agreed.
378 |
379 | b. Any arrangements, understandings, or agreements regarding the
380 | Licensed Material not stated herein are separate from and
381 | independent of the terms and conditions of this Public License.
382 |
383 |
384 | Section 8 -- Interpretation.
385 |
386 | a. For the avoidance of doubt, this Public License does not, and
387 | shall not be interpreted to, reduce, limit, restrict, or impose
388 | conditions on any use of the Licensed Material that could lawfully
389 | be made without permission under this Public License.
390 |
391 | b. To the extent possible, if any provision of this Public License is
392 | deemed unenforceable, it shall be automatically reformed to the
393 | minimum extent necessary to make it enforceable. If the provision
394 | cannot be reformed, it shall be severed from this Public License
395 | without affecting the enforceability of the remaining terms and
396 | conditions.
397 |
398 | c. No term or condition of this Public License will be waived and no
399 | failure to comply consented to unless expressly agreed to by the
400 | Licensor.
401 |
402 | d. Nothing in this Public License constitutes or may be interpreted
403 | as a limitation upon, or waiver of, any privileges and immunities
404 | that apply to the Licensor or You, including from the legal
405 | processes of any jurisdiction or authority.
406 |
407 |
408 | =======================================================================
409 |
410 | Creative Commons is not a party to its public licenses.
411 | Notwithstanding, Creative Commons may elect to apply one of its public
412 | licenses to material it publishes and in those instances will be
413 | considered the “Licensor.” The text of the Creative Commons public
414 | licenses is dedicated to the public domain under the CC0 Public Domain
415 | Dedication. Except for the limited purpose of indicating that material
416 | is shared under a Creative Commons public license or as otherwise
417 | permitted by the Creative Commons policies published at
418 | creativecommons.org/policies, Creative Commons does not authorize the
419 | use of the trademark "Creative Commons" or any other trademark or logo
420 | of Creative Commons without its prior written consent including,
421 | without limitation, in connection with any unauthorized modifications
422 | to any of its public licenses or any other arrangements,
423 | understandings, or agreements concerning use of licensed material. For
424 | the avoidance of doubt, this paragraph does not form part of the public
425 | licenses.
426 |
427 | Creative Commons may be contacted at creativecommons.org.
428 |
--------------------------------------------------------------------------------
/yarn.lock:
--------------------------------------------------------------------------------
1 | # This file is generated by running "yarn install" inside your project.
2 | # Manual changes might be lost - proceed with caution!
3 |
4 | __metadata:
5 | version: 8
6 | cacheKey: 10c0
7 |
8 | "@braintree/sanitize-url@npm:^6.0.1":
9 | version: 6.0.4
10 | resolution: "@braintree/sanitize-url@npm:6.0.4"
11 | checksum: 5d7bac57f3e49931db83f65aaa4fd22f96caa323bf0c7fcf6851fdbed179a8cf29eaa5dd372d340fc51ca5f44345ea5bc0196b36c8b16179888a7c9044313420
12 | languageName: node
13 | linkType: hard
14 |
15 | "@esbuild/aix-ppc64@npm:0.21.5":
16 | version: 0.21.5
17 | resolution: "@esbuild/aix-ppc64@npm:0.21.5"
18 | conditions: os=aix & cpu=ppc64
19 | languageName: node
20 | linkType: hard
21 |
22 | "@esbuild/android-arm64@npm:0.21.5":
23 | version: 0.21.5
24 | resolution: "@esbuild/android-arm64@npm:0.21.5"
25 | conditions: os=android & cpu=arm64
26 | languageName: node
27 | linkType: hard
28 |
29 | "@esbuild/android-arm@npm:0.21.5":
30 | version: 0.21.5
31 | resolution: "@esbuild/android-arm@npm:0.21.5"
32 | conditions: os=android & cpu=arm
33 | languageName: node
34 | linkType: hard
35 |
36 | "@esbuild/android-x64@npm:0.21.5":
37 | version: 0.21.5
38 | resolution: "@esbuild/android-x64@npm:0.21.5"
39 | conditions: os=android & cpu=x64
40 | languageName: node
41 | linkType: hard
42 |
43 | "@esbuild/darwin-arm64@npm:0.21.5":
44 | version: 0.21.5
45 | resolution: "@esbuild/darwin-arm64@npm:0.21.5"
46 | conditions: os=darwin & cpu=arm64
47 | languageName: node
48 | linkType: hard
49 |
50 | "@esbuild/darwin-x64@npm:0.21.5":
51 | version: 0.21.5
52 | resolution: "@esbuild/darwin-x64@npm:0.21.5"
53 | conditions: os=darwin & cpu=x64
54 | languageName: node
55 | linkType: hard
56 |
57 | "@esbuild/freebsd-arm64@npm:0.21.5":
58 | version: 0.21.5
59 | resolution: "@esbuild/freebsd-arm64@npm:0.21.5"
60 | conditions: os=freebsd & cpu=arm64
61 | languageName: node
62 | linkType: hard
63 |
64 | "@esbuild/freebsd-x64@npm:0.21.5":
65 | version: 0.21.5
66 | resolution: "@esbuild/freebsd-x64@npm:0.21.5"
67 | conditions: os=freebsd & cpu=x64
68 | languageName: node
69 | linkType: hard
70 |
71 | "@esbuild/linux-arm64@npm:0.21.5":
72 | version: 0.21.5
73 | resolution: "@esbuild/linux-arm64@npm:0.21.5"
74 | conditions: os=linux & cpu=arm64
75 | languageName: node
76 | linkType: hard
77 |
78 | "@esbuild/linux-arm@npm:0.21.5":
79 | version: 0.21.5
80 | resolution: "@esbuild/linux-arm@npm:0.21.5"
81 | conditions: os=linux & cpu=arm
82 | languageName: node
83 | linkType: hard
84 |
85 | "@esbuild/linux-ia32@npm:0.21.5":
86 | version: 0.21.5
87 | resolution: "@esbuild/linux-ia32@npm:0.21.5"
88 | conditions: os=linux & cpu=ia32
89 | languageName: node
90 | linkType: hard
91 |
92 | "@esbuild/linux-loong64@npm:0.21.5":
93 | version: 0.21.5
94 | resolution: "@esbuild/linux-loong64@npm:0.21.5"
95 | conditions: os=linux & cpu=loong64
96 | languageName: node
97 | linkType: hard
98 |
99 | "@esbuild/linux-mips64el@npm:0.21.5":
100 | version: 0.21.5
101 | resolution: "@esbuild/linux-mips64el@npm:0.21.5"
102 | conditions: os=linux & cpu=mips64el
103 | languageName: node
104 | linkType: hard
105 |
106 | "@esbuild/linux-ppc64@npm:0.21.5":
107 | version: 0.21.5
108 | resolution: "@esbuild/linux-ppc64@npm:0.21.5"
109 | conditions: os=linux & cpu=ppc64
110 | languageName: node
111 | linkType: hard
112 |
113 | "@esbuild/linux-riscv64@npm:0.21.5":
114 | version: 0.21.5
115 | resolution: "@esbuild/linux-riscv64@npm:0.21.5"
116 | conditions: os=linux & cpu=riscv64
117 | languageName: node
118 | linkType: hard
119 |
120 | "@esbuild/linux-s390x@npm:0.21.5":
121 | version: 0.21.5
122 | resolution: "@esbuild/linux-s390x@npm:0.21.5"
123 | conditions: os=linux & cpu=s390x
124 | languageName: node
125 | linkType: hard
126 |
127 | "@esbuild/linux-x64@npm:0.21.5":
128 | version: 0.21.5
129 | resolution: "@esbuild/linux-x64@npm:0.21.5"
130 | conditions: os=linux & cpu=x64
131 | languageName: node
132 | linkType: hard
133 |
134 | "@esbuild/netbsd-x64@npm:0.21.5":
135 | version: 0.21.5
136 | resolution: "@esbuild/netbsd-x64@npm:0.21.5"
137 | conditions: os=netbsd & cpu=x64
138 | languageName: node
139 | linkType: hard
140 |
141 | "@esbuild/openbsd-x64@npm:0.21.5":
142 | version: 0.21.5
143 | resolution: "@esbuild/openbsd-x64@npm:0.21.5"
144 | conditions: os=openbsd & cpu=x64
145 | languageName: node
146 | linkType: hard
147 |
148 | "@esbuild/sunos-x64@npm:0.21.5":
149 | version: 0.21.5
150 | resolution: "@esbuild/sunos-x64@npm:0.21.5"
151 | conditions: os=sunos & cpu=x64
152 | languageName: node
153 | linkType: hard
154 |
155 | "@esbuild/win32-arm64@npm:0.21.5":
156 | version: 0.21.5
157 | resolution: "@esbuild/win32-arm64@npm:0.21.5"
158 | conditions: os=win32 & cpu=arm64
159 | languageName: node
160 | linkType: hard
161 |
162 | "@esbuild/win32-ia32@npm:0.21.5":
163 | version: 0.21.5
164 | resolution: "@esbuild/win32-ia32@npm:0.21.5"
165 | conditions: os=win32 & cpu=ia32
166 | languageName: node
167 | linkType: hard
168 |
169 | "@esbuild/win32-x64@npm:0.21.5":
170 | version: 0.21.5
171 | resolution: "@esbuild/win32-x64@npm:0.21.5"
172 | conditions: os=win32 & cpu=x64
173 | languageName: node
174 | linkType: hard
175 |
176 | "@hotwired/stimulus@npm:^3.2.2":
177 | version: 3.2.2
178 | resolution: "@hotwired/stimulus@npm:3.2.2"
179 | checksum: 3793919e353d28424f57d07e6b634bf6e8821c227acfa2a608aaa8bdfe7e80479acef12705168ca866f3c231399f4a939fb0cd089aaae04c406b3d885a3b35ea
180 | languageName: node
181 | linkType: hard
182 |
183 | "@hotwired/turbo@npm:^8.0.4":
184 | version: 8.0.4
185 | resolution: "@hotwired/turbo@npm:8.0.4"
186 | checksum: 3e7174d550adf3b9a0a4c45ff5d1b71d47c20de6ae01f646fbbaf8ed447677bd1a5fb6dbd46e95b14645a663fe58a6187c1c6d2a688b0981b5cf437379c5e00a
187 | languageName: node
188 | linkType: hard
189 |
190 | "@types/d3-scale-chromatic@npm:^3.0.0":
191 | version: 3.0.3
192 | resolution: "@types/d3-scale-chromatic@npm:3.0.3"
193 | checksum: 2f48c6f370edba485b57b73573884ded71914222a4580140ff87ee96e1d55ccd05b1d457f726e234a31269b803270ac95d5554229ab6c43c7e4a9894e20dd490
194 | languageName: node
195 | linkType: hard
196 |
197 | "@types/d3-scale@npm:^4.0.3":
198 | version: 4.0.8
199 | resolution: "@types/d3-scale@npm:4.0.8"
200 | dependencies:
201 | "@types/d3-time": "npm:*"
202 | checksum: 57de90e4016f640b83cb960b7e3a0ab3ed02e720898840ddc5105264ffcfea73336161442fdc91895377c2d2f91904d637282f16852b8535b77e15a761c8e99e
203 | languageName: node
204 | linkType: hard
205 |
206 | "@types/d3-time@npm:*":
207 | version: 3.0.3
208 | resolution: "@types/d3-time@npm:3.0.3"
209 | checksum: 245a8aadca504df27edf730de502e47a68f16ae795c86b5ca35e7afa91c133aa9ef4d08778f8cf1ed2be732f89a4105ba4b437ce2afbdfd17d3d937b6ba5f568
210 | languageName: node
211 | linkType: hard
212 |
213 | "@types/debug@npm:^4.0.0":
214 | version: 4.1.12
215 | resolution: "@types/debug@npm:4.1.12"
216 | dependencies:
217 | "@types/ms": "npm:*"
218 | checksum: 5dcd465edbb5a7f226e9a5efd1f399c6172407ef5840686b73e3608ce135eeca54ae8037dcd9f16bdb2768ac74925b820a8b9ecc588a58ca09eca6acabe33e2f
219 | languageName: node
220 | linkType: hard
221 |
222 | "@types/hotwired__turbo@npm:^8":
223 | version: 8.0.1
224 | resolution: "@types/hotwired__turbo@npm:8.0.1"
225 | checksum: 39d95c6e5293b31e63166aa6eef9e03cdd9378d840e6942a91e4ec95659af4b57ea50e5e08ebe359ca39cb2253d18d1946b2c5e069c042912eec791a5232cfe8
226 | languageName: node
227 | linkType: hard
228 |
229 | "@types/mdast@npm:^3.0.0":
230 | version: 3.0.15
231 | resolution: "@types/mdast@npm:3.0.15"
232 | dependencies:
233 | "@types/unist": "npm:^2"
234 | checksum: fcbf716c03d1ed5465deca60862e9691414f9c43597c288c7d2aefbe274552e1bbd7aeee91b88a02597e88a28c139c57863d0126fcf8416a95fdc681d054ee3d
235 | languageName: node
236 | linkType: hard
237 |
238 | "@types/ms@npm:*":
239 | version: 0.7.34
240 | resolution: "@types/ms@npm:0.7.34"
241 | checksum: ac80bd90012116ceb2d188fde62d96830ca847823e8ca71255616bc73991aa7d9f057b8bfab79e8ee44ffefb031ddd1bcce63ea82f9e66f7c31ec02d2d823ccc
242 | languageName: node
243 | linkType: hard
244 |
245 | "@types/unist@npm:^2, @types/unist@npm:^2.0.0":
246 | version: 2.0.10
247 | resolution: "@types/unist@npm:2.0.10"
248 | checksum: 5f247dc2229944355209ad5c8e83cfe29419fa7f0a6d557421b1985a1500444719cc9efcc42c652b55aab63c931813c88033e0202c1ac684bcd4829d66e44731
249 | languageName: node
250 | linkType: hard
251 |
252 | "character-entities@npm:^2.0.0":
253 | version: 2.0.2
254 | resolution: "character-entities@npm:2.0.2"
255 | checksum: b0c645a45bcc90ff24f0e0140f4875a8436b8ef13b6bcd31ec02cfb2ca502b680362aa95386f7815bdc04b6464d48cf191210b3840d7c04241a149ede591a308
256 | languageName: node
257 | linkType: hard
258 |
259 | "commander@npm:7":
260 | version: 7.2.0
261 | resolution: "commander@npm:7.2.0"
262 | checksum: 8d690ff13b0356df7e0ebbe6c59b4712f754f4b724d4f473d3cc5b3fdcf978e3a5dc3078717858a2ceb50b0f84d0660a7f22a96cdc50fb877d0c9bb31593d23a
263 | languageName: node
264 | linkType: hard
265 |
266 | "commander@npm:^8.3.0":
267 | version: 8.3.0
268 | resolution: "commander@npm:8.3.0"
269 | checksum: 8b043bb8322ea1c39664a1598a95e0495bfe4ca2fad0d84a92d7d1d8d213e2a155b441d2470c8e08de7c4a28cf2bc6e169211c49e1b21d9f7edc6ae4d9356060
270 | languageName: node
271 | linkType: hard
272 |
273 | "cose-base@npm:^1.0.0":
274 | version: 1.0.3
275 | resolution: "cose-base@npm:1.0.3"
276 | dependencies:
277 | layout-base: "npm:^1.0.0"
278 | checksum: a6e400b1d101393d6af0967c1353355777c1106c40417c5acaef6ca8bdda41e2fc9398f466d6c85be30290943ad631f2590569f67b3fd5368a0d8318946bd24f
279 | languageName: node
280 | linkType: hard
281 |
282 | "cytoscape-cose-bilkent@npm:^4.1.0":
283 | version: 4.1.0
284 | resolution: "cytoscape-cose-bilkent@npm:4.1.0"
285 | dependencies:
286 | cose-base: "npm:^1.0.0"
287 | peerDependencies:
288 | cytoscape: ^3.2.0
289 | checksum: 5e2480ddba9da1a68e700ed2c674cbfd51e9efdbd55788f1971a68de4eb30708e3b3a5e808bf5628f7a258680406bbe6586d87a9133e02a9bdc1ab1a92f512f2
290 | languageName: node
291 | linkType: hard
292 |
293 | "cytoscape@npm:^3.28.1":
294 | version: 3.30.0
295 | resolution: "cytoscape@npm:3.30.0"
296 | checksum: ffd463f27975b7d979c6f1a8c3472d95cbe2d75dc820aac27ad3e253eaa877da4c7ed4632a341394d911ba1804c5786a292a56387bbb34fcf0c9db69286e123e
297 | languageName: node
298 | linkType: hard
299 |
300 | "d3-array@npm:1 - 2":
301 | version: 2.12.1
302 | resolution: "d3-array@npm:2.12.1"
303 | dependencies:
304 | internmap: "npm:^1.0.0"
305 | checksum: 7eca10427a9f113a4ca6a0f7301127cab26043fd5e362631ef5a0edd1c4b2dd70c56ed317566700c31e4a6d88b55f3951aaba192291817f243b730cb2352882e
306 | languageName: node
307 | linkType: hard
308 |
309 | "d3-array@npm:2 - 3, d3-array@npm:2.10.0 - 3, d3-array@npm:2.5.0 - 3, d3-array@npm:3, d3-array@npm:^3.2.0":
310 | version: 3.2.4
311 | resolution: "d3-array@npm:3.2.4"
312 | dependencies:
313 | internmap: "npm:1 - 2"
314 | checksum: 08b95e91130f98c1375db0e0af718f4371ccacef7d5d257727fe74f79a24383e79aba280b9ffae655483ffbbad4fd1dec4ade0119d88c4749f388641c8bf8c50
315 | languageName: node
316 | linkType: hard
317 |
318 | "d3-axis@npm:3":
319 | version: 3.0.0
320 | resolution: "d3-axis@npm:3.0.0"
321 | checksum: a271e70ba1966daa5aaf6a7f959ceca3e12997b43297e757c7b945db2e1ead3c6ee226f2abcfa22abbd4e2e28bd2b71a0911794c4e5b911bbba271328a582c78
322 | languageName: node
323 | linkType: hard
324 |
325 | "d3-brush@npm:3":
326 | version: 3.0.0
327 | resolution: "d3-brush@npm:3.0.0"
328 | dependencies:
329 | d3-dispatch: "npm:1 - 3"
330 | d3-drag: "npm:2 - 3"
331 | d3-interpolate: "npm:1 - 3"
332 | d3-selection: "npm:3"
333 | d3-transition: "npm:3"
334 | checksum: 07baf00334c576da2f68a91fc0da5732c3a5fa19bd3d7aed7fd24d1d674a773f71a93e9687c154176f7246946194d77c48c2d8fed757f5dcb1a4740067ec50a8
335 | languageName: node
336 | linkType: hard
337 |
338 | "d3-chord@npm:3":
339 | version: 3.0.1
340 | resolution: "d3-chord@npm:3.0.1"
341 | dependencies:
342 | d3-path: "npm:1 - 3"
343 | checksum: baa6013914af3f4fe1521f0d16de31a38eb8a71d08ff1dec4741f6f45a828661e5cd3935e39bd14e3032bdc78206c283ca37411da21d46ec3cfc520be6e7a7ce
344 | languageName: node
345 | linkType: hard
346 |
347 | "d3-color@npm:1 - 3, d3-color@npm:3":
348 | version: 3.1.0
349 | resolution: "d3-color@npm:3.1.0"
350 | checksum: a4e20e1115fa696fce041fbe13fbc80dc4c19150fa72027a7c128ade980bc0eeeba4bcf28c9e21f0bce0e0dbfe7ca5869ef67746541dcfda053e4802ad19783c
351 | languageName: node
352 | linkType: hard
353 |
354 | "d3-contour@npm:4":
355 | version: 4.0.2
356 | resolution: "d3-contour@npm:4.0.2"
357 | dependencies:
358 | d3-array: "npm:^3.2.0"
359 | checksum: 98bc5fbed6009e08707434a952076f39f1cd6ed8b9288253cc3e6a3286e4e80c63c62d84954b20e64bf6e4ededcc69add54d3db25e990784a59c04edd3449032
360 | languageName: node
361 | linkType: hard
362 |
363 | "d3-delaunay@npm:6":
364 | version: 6.0.4
365 | resolution: "d3-delaunay@npm:6.0.4"
366 | dependencies:
367 | delaunator: "npm:5"
368 | checksum: 57c3aecd2525664b07c4c292aa11cf49b2752c0cf3f5257f752999399fe3c592de2d418644d79df1f255471eec8057a9cc0c3062ed7128cb3348c45f69597754
369 | languageName: node
370 | linkType: hard
371 |
372 | "d3-dispatch@npm:1 - 3, d3-dispatch@npm:3":
373 | version: 3.0.1
374 | resolution: "d3-dispatch@npm:3.0.1"
375 | checksum: 6eca77008ce2dc33380e45d4410c67d150941df7ab45b91d116dbe6d0a3092c0f6ac184dd4602c796dc9e790222bad3ff7142025f5fd22694efe088d1d941753
376 | languageName: node
377 | linkType: hard
378 |
379 | "d3-drag@npm:2 - 3, d3-drag@npm:3":
380 | version: 3.0.0
381 | resolution: "d3-drag@npm:3.0.0"
382 | dependencies:
383 | d3-dispatch: "npm:1 - 3"
384 | d3-selection: "npm:3"
385 | checksum: d2556e8dc720741a443b595a30af403dd60642dfd938d44d6e9bfc4c71a962142f9a028c56b61f8b4790b65a34acad177d1263d66f103c3c527767b0926ef5aa
386 | languageName: node
387 | linkType: hard
388 |
389 | "d3-dsv@npm:1 - 3, d3-dsv@npm:3":
390 | version: 3.0.1
391 | resolution: "d3-dsv@npm:3.0.1"
392 | dependencies:
393 | commander: "npm:7"
394 | iconv-lite: "npm:0.6"
395 | rw: "npm:1"
396 | bin:
397 | csv2json: bin/dsv2json.js
398 | csv2tsv: bin/dsv2dsv.js
399 | dsv2dsv: bin/dsv2dsv.js
400 | dsv2json: bin/dsv2json.js
401 | json2csv: bin/json2dsv.js
402 | json2dsv: bin/json2dsv.js
403 | json2tsv: bin/json2dsv.js
404 | tsv2csv: bin/dsv2dsv.js
405 | tsv2json: bin/dsv2json.js
406 | checksum: 10e6af9e331950ed258f34ab49ac1b7060128ef81dcf32afc790bd1f7e8c3cc2aac7f5f875250a83f21f39bb5925fbd0872bb209f8aca32b3b77d32bab8a65ab
407 | languageName: node
408 | linkType: hard
409 |
410 | "d3-ease@npm:1 - 3, d3-ease@npm:3":
411 | version: 3.0.1
412 | resolution: "d3-ease@npm:3.0.1"
413 | checksum: fec8ef826c0cc35cda3092c6841e07672868b1839fcaf556e19266a3a37e6bc7977d8298c0fcb9885e7799bfdcef7db1baaba9cd4dcf4bc5e952cf78574a88b0
414 | languageName: node
415 | linkType: hard
416 |
417 | "d3-fetch@npm:3":
418 | version: 3.0.1
419 | resolution: "d3-fetch@npm:3.0.1"
420 | dependencies:
421 | d3-dsv: "npm:1 - 3"
422 | checksum: 4f467a79bf290395ac0cbb5f7562483f6a18668adc4c8eb84c9d3eff048b6f6d3b6f55079ba1ebf1908dabe000c941d46be447f8d78453b2dad5fb59fb6aa93b
423 | languageName: node
424 | linkType: hard
425 |
426 | "d3-force@npm:3":
427 | version: 3.0.0
428 | resolution: "d3-force@npm:3.0.0"
429 | dependencies:
430 | d3-dispatch: "npm:1 - 3"
431 | d3-quadtree: "npm:1 - 3"
432 | d3-timer: "npm:1 - 3"
433 | checksum: 220a16a1a1ac62ba56df61028896e4b52be89c81040d20229c876efc8852191482c233f8a52bb5a4e0875c321b8e5cb6413ef3dfa4d8fe79eeb7d52c587f52cf
434 | languageName: node
435 | linkType: hard
436 |
437 | "d3-format@npm:1 - 3, d3-format@npm:3":
438 | version: 3.1.0
439 | resolution: "d3-format@npm:3.1.0"
440 | checksum: 049f5c0871ebce9859fc5e2f07f336b3c5bfff52a2540e0bac7e703fce567cd9346f4ad1079dd18d6f1e0eaa0599941c1810898926f10ac21a31fd0a34b4aa75
441 | languageName: node
442 | linkType: hard
443 |
444 | "d3-geo@npm:3":
445 | version: 3.1.1
446 | resolution: "d3-geo@npm:3.1.1"
447 | dependencies:
448 | d3-array: "npm:2.5.0 - 3"
449 | checksum: d32270dd2dc8ac3ea63e8805d63239c4c8ec6c0d339d73b5e5a30a87f8f54db22a78fb434369799465eae169503b25f9a107c642c8a16c32a3285bc0e6d8e8c1
450 | languageName: node
451 | linkType: hard
452 |
453 | "d3-hierarchy@npm:3":
454 | version: 3.1.2
455 | resolution: "d3-hierarchy@npm:3.1.2"
456 | checksum: 6dcdb480539644aa7fc0d72dfc7b03f99dfbcdf02714044e8c708577e0d5981deb9d3e99bbbb2d26422b55bcc342ac89a0fa2ea6c9d7302e2fc0951dd96f89cf
457 | languageName: node
458 | linkType: hard
459 |
460 | "d3-interpolate@npm:1 - 3, d3-interpolate@npm:1.2.0 - 3, d3-interpolate@npm:3":
461 | version: 3.0.1
462 | resolution: "d3-interpolate@npm:3.0.1"
463 | dependencies:
464 | d3-color: "npm:1 - 3"
465 | checksum: 19f4b4daa8d733906671afff7767c19488f51a43d251f8b7f484d5d3cfc36c663f0a66c38fe91eee30f40327443d799be17169f55a293a3ba949e84e57a33e6a
466 | languageName: node
467 | linkType: hard
468 |
469 | "d3-path@npm:1":
470 | version: 1.0.9
471 | resolution: "d3-path@npm:1.0.9"
472 | checksum: e35e84df5abc18091f585725b8235e1fa97efc287571585427d3a3597301e6c506dea56b11dfb3c06ca5858b3eb7f02c1bf4f6a716aa9eade01c41b92d497eb5
473 | languageName: node
474 | linkType: hard
475 |
476 | "d3-path@npm:1 - 3, d3-path@npm:3, d3-path@npm:^3.1.0":
477 | version: 3.1.0
478 | resolution: "d3-path@npm:3.1.0"
479 | checksum: dc1d58ec87fa8319bd240cf7689995111a124b141428354e9637aa83059eb12e681f77187e0ada5dedfce346f7e3d1f903467ceb41b379bfd01cd8e31721f5da
480 | languageName: node
481 | linkType: hard
482 |
483 | "d3-polygon@npm:3":
484 | version: 3.0.1
485 | resolution: "d3-polygon@npm:3.0.1"
486 | checksum: e236aa7f33efa9a4072907af7dc119f85b150a0716759d4fe5f12f62573018264a6cbde8617fbfa6944a7ae48c1c0c8d3f39ae72e11f66dd471e9b5e668385df
487 | languageName: node
488 | linkType: hard
489 |
490 | "d3-quadtree@npm:1 - 3, d3-quadtree@npm:3":
491 | version: 3.0.1
492 | resolution: "d3-quadtree@npm:3.0.1"
493 | checksum: 18302d2548bfecaef788152397edec95a76400fd97d9d7f42a089ceb68d910f685c96579d74e3712d57477ed042b056881b47cd836a521de683c66f47ce89090
494 | languageName: node
495 | linkType: hard
496 |
497 | "d3-random@npm:3":
498 | version: 3.0.1
499 | resolution: "d3-random@npm:3.0.1"
500 | checksum: 987a1a1bcbf26e6cf01fd89d5a265b463b2cea93560fc17d9b1c45e8ed6ff2db5924601bcceb808de24c94133f000039eb7fa1c469a7a844ccbf1170cbb25b41
501 | languageName: node
502 | linkType: hard
503 |
504 | "d3-sankey@npm:^0.12.3":
505 | version: 0.12.3
506 | resolution: "d3-sankey@npm:0.12.3"
507 | dependencies:
508 | d3-array: "npm:1 - 2"
509 | d3-shape: "npm:^1.2.0"
510 | checksum: 261debb01a13269f6fc53b9ebaef174a015d5ad646242c23995bf514498829ab8b8f920a7873724a7494288b46bea3ce7ebc5a920b745bc8ae4caa5885cf5204
511 | languageName: node
512 | linkType: hard
513 |
514 | "d3-scale-chromatic@npm:3":
515 | version: 3.1.0
516 | resolution: "d3-scale-chromatic@npm:3.1.0"
517 | dependencies:
518 | d3-color: "npm:1 - 3"
519 | d3-interpolate: "npm:1 - 3"
520 | checksum: 9a3f4671ab0b971f4a411b42180d7cf92bfe8e8584e637ce7e698d705e18d6d38efbd20ec64f60cc0dfe966c20d40fc172565bc28aaa2990c0a006360eed91af
521 | languageName: node
522 | linkType: hard
523 |
524 | "d3-scale@npm:4":
525 | version: 4.0.2
526 | resolution: "d3-scale@npm:4.0.2"
527 | dependencies:
528 | d3-array: "npm:2.10.0 - 3"
529 | d3-format: "npm:1 - 3"
530 | d3-interpolate: "npm:1.2.0 - 3"
531 | d3-time: "npm:2.1.1 - 3"
532 | d3-time-format: "npm:2 - 4"
533 | checksum: 65d9ad8c2641aec30ed5673a7410feb187a224d6ca8d1a520d68a7d6eac9d04caedbff4713d1e8545be33eb7fec5739983a7ab1d22d4e5ad35368c6729d362f1
534 | languageName: node
535 | linkType: hard
536 |
537 | "d3-selection@npm:2 - 3, d3-selection@npm:3":
538 | version: 3.0.0
539 | resolution: "d3-selection@npm:3.0.0"
540 | checksum: e59096bbe8f0cb0daa1001d9bdd6dbc93a688019abc97d1d8b37f85cd3c286a6875b22adea0931b0c88410d025563e1643019161a883c516acf50c190a11b56b
541 | languageName: node
542 | linkType: hard
543 |
544 | "d3-shape@npm:3":
545 | version: 3.2.0
546 | resolution: "d3-shape@npm:3.2.0"
547 | dependencies:
548 | d3-path: "npm:^3.1.0"
549 | checksum: f1c9d1f09926daaf6f6193ae3b4c4b5521e81da7d8902d24b38694517c7f527ce3c9a77a9d3a5722ad1e3ff355860b014557b450023d66a944eabf8cfde37132
550 | languageName: node
551 | linkType: hard
552 |
553 | "d3-shape@npm:^1.2.0":
554 | version: 1.3.7
555 | resolution: "d3-shape@npm:1.3.7"
556 | dependencies:
557 | d3-path: "npm:1"
558 | checksum: 548057ce59959815decb449f15632b08e2a1bdce208f9a37b5f98ec7629dda986c2356bc7582308405ce68aedae7d47b324df41507404df42afaf352907577ae
559 | languageName: node
560 | linkType: hard
561 |
562 | "d3-time-format@npm:2 - 4, d3-time-format@npm:4":
563 | version: 4.1.0
564 | resolution: "d3-time-format@npm:4.1.0"
565 | dependencies:
566 | d3-time: "npm:1 - 3"
567 | checksum: 735e00fb25a7fd5d418fac350018713ae394eefddb0d745fab12bbff0517f9cdb5f807c7bbe87bb6eeb06249662f8ea84fec075f7d0cd68609735b2ceb29d206
568 | languageName: node
569 | linkType: hard
570 |
571 | "d3-time@npm:1 - 3, d3-time@npm:2.1.1 - 3, d3-time@npm:3":
572 | version: 3.1.0
573 | resolution: "d3-time@npm:3.1.0"
574 | dependencies:
575 | d3-array: "npm:2 - 3"
576 | checksum: a984f77e1aaeaa182679b46fbf57eceb6ebdb5f67d7578d6f68ef933f8eeb63737c0949991618a8d29472dbf43736c7d7f17c452b2770f8c1271191cba724ca1
577 | languageName: node
578 | linkType: hard
579 |
580 | "d3-timer@npm:1 - 3, d3-timer@npm:3":
581 | version: 3.0.1
582 | resolution: "d3-timer@npm:3.0.1"
583 | checksum: d4c63cb4bb5461d7038aac561b097cd1c5673969b27cbdd0e87fa48d9300a538b9e6f39b4a7f0e3592ef4f963d858c8a9f0e92754db73116770856f2fc04561a
584 | languageName: node
585 | linkType: hard
586 |
587 | "d3-transition@npm:2 - 3, d3-transition@npm:3":
588 | version: 3.0.1
589 | resolution: "d3-transition@npm:3.0.1"
590 | dependencies:
591 | d3-color: "npm:1 - 3"
592 | d3-dispatch: "npm:1 - 3"
593 | d3-ease: "npm:1 - 3"
594 | d3-interpolate: "npm:1 - 3"
595 | d3-timer: "npm:1 - 3"
596 | peerDependencies:
597 | d3-selection: 2 - 3
598 | checksum: 4e74535dda7024aa43e141635b7522bb70cf9d3dfefed975eb643b36b864762eca67f88fafc2ca798174f83ca7c8a65e892624f824b3f65b8145c6a1a88dbbad
599 | languageName: node
600 | linkType: hard
601 |
602 | "d3-zoom@npm:3":
603 | version: 3.0.0
604 | resolution: "d3-zoom@npm:3.0.0"
605 | dependencies:
606 | d3-dispatch: "npm:1 - 3"
607 | d3-drag: "npm:2 - 3"
608 | d3-interpolate: "npm:1 - 3"
609 | d3-selection: "npm:2 - 3"
610 | d3-transition: "npm:2 - 3"
611 | checksum: ee2036479049e70d8c783d594c444fe00e398246048e3f11a59755cd0e21de62ece3126181b0d7a31bf37bcf32fd726f83ae7dea4495ff86ec7736ce5ad36fd3
612 | languageName: node
613 | linkType: hard
614 |
615 | "d3@npm:^7.4.0, d3@npm:^7.8.2":
616 | version: 7.9.0
617 | resolution: "d3@npm:7.9.0"
618 | dependencies:
619 | d3-array: "npm:3"
620 | d3-axis: "npm:3"
621 | d3-brush: "npm:3"
622 | d3-chord: "npm:3"
623 | d3-color: "npm:3"
624 | d3-contour: "npm:4"
625 | d3-delaunay: "npm:6"
626 | d3-dispatch: "npm:3"
627 | d3-drag: "npm:3"
628 | d3-dsv: "npm:3"
629 | d3-ease: "npm:3"
630 | d3-fetch: "npm:3"
631 | d3-force: "npm:3"
632 | d3-format: "npm:3"
633 | d3-geo: "npm:3"
634 | d3-hierarchy: "npm:3"
635 | d3-interpolate: "npm:3"
636 | d3-path: "npm:3"
637 | d3-polygon: "npm:3"
638 | d3-quadtree: "npm:3"
639 | d3-random: "npm:3"
640 | d3-scale: "npm:4"
641 | d3-scale-chromatic: "npm:3"
642 | d3-selection: "npm:3"
643 | d3-shape: "npm:3"
644 | d3-time: "npm:3"
645 | d3-time-format: "npm:4"
646 | d3-timer: "npm:3"
647 | d3-transition: "npm:3"
648 | d3-zoom: "npm:3"
649 | checksum: 3dd9c08c73cfaa69c70c49e603c85e049c3904664d9c79a1a52a0f52795828a1ff23592dc9a7b2257e711d68a615472a13103c212032f38e016d609796e087e8
650 | languageName: node
651 | linkType: hard
652 |
653 | "dagre-d3-es@npm:7.0.10":
654 | version: 7.0.10
655 | resolution: "dagre-d3-es@npm:7.0.10"
656 | dependencies:
657 | d3: "npm:^7.8.2"
658 | lodash-es: "npm:^4.17.21"
659 | checksum: 3e1bb6efe9a78cea3fe6ff265eb330692f057bf84c99d6a1d67db379231c37a1a1ca2e1ccc25a732ddf924cd5566062c033d88defd230debec324dc9256c6775
660 | languageName: node
661 | linkType: hard
662 |
663 | "dayjs@npm:^1.11.7":
664 | version: 1.11.11
665 | resolution: "dayjs@npm:1.11.11"
666 | checksum: 0131d10516b9945f05a57e13f4af49a6814de5573a494824e103131a3bbe4cc470b1aefe8e17e51f9a478a22cd116084be1ee5725cedb66ec4c3f9091202dc4b
667 | languageName: node
668 | linkType: hard
669 |
670 | "debug@npm:^4.0.0":
671 | version: 4.3.5
672 | resolution: "debug@npm:4.3.5"
673 | dependencies:
674 | ms: "npm:2.1.2"
675 | peerDependenciesMeta:
676 | supports-color:
677 | optional: true
678 | checksum: 082c375a2bdc4f4469c99f325ff458adad62a3fc2c482d59923c260cb08152f34e2659f72b3767db8bb2f21ca81a60a42d1019605a412132d7b9f59363a005cc
679 | languageName: node
680 | linkType: hard
681 |
682 | "decode-named-character-reference@npm:^1.0.0":
683 | version: 1.0.2
684 | resolution: "decode-named-character-reference@npm:1.0.2"
685 | dependencies:
686 | character-entities: "npm:^2.0.0"
687 | checksum: 66a9fc5d9b5385a2b3675c69ba0d8e893393d64057f7dbbb585265bb4fc05ec513d76943b8e5aac7d8016d20eea4499322cbf4cd6d54b466976b78f3a7587a4c
688 | languageName: node
689 | linkType: hard
690 |
691 | "delaunator@npm:5":
692 | version: 5.0.1
693 | resolution: "delaunator@npm:5.0.1"
694 | dependencies:
695 | robust-predicates: "npm:^3.0.2"
696 | checksum: 3d7ea4d964731c5849af33fec0a271bc6753487b331fd7d43ccb17d77834706e1c383e6ab8fda0032da955e7576d1083b9603cdaf9cbdfd6b3ebd1fb8bb675a5
697 | languageName: node
698 | linkType: hard
699 |
700 | "dequal@npm:^2.0.0":
701 | version: 2.0.3
702 | resolution: "dequal@npm:2.0.3"
703 | checksum: f98860cdf58b64991ae10205137c0e97d384c3a4edc7f807603887b7c4b850af1224a33d88012009f150861cbee4fa2d322c4cc04b9313bee312e47f6ecaa888
704 | languageName: node
705 | linkType: hard
706 |
707 | "diff@npm:^5.0.0":
708 | version: 5.2.0
709 | resolution: "diff@npm:5.2.0"
710 | checksum: aed0941f206fe261ecb258dc8d0ceea8abbde3ace5827518ff8d302f0fc9cc81ce116c4d8f379151171336caf0516b79e01abdc1ed1201b6440d895a66689eb4
711 | languageName: node
712 | linkType: hard
713 |
714 | "dompurify@npm:^3.0.5":
715 | version: 3.1.5
716 | resolution: "dompurify@npm:3.1.5"
717 | checksum: 8227fb1328c02d94f823de8cd499fca5ee07051e03e6b52bce06b592348aeb6e56ea8e2a4b0a9cfaeac7d623e4b5a52e4326aedf72596a6c2510b88dfcf2a2b6
718 | languageName: node
719 | linkType: hard
720 |
721 | "elkjs@npm:^0.9.0":
722 | version: 0.9.3
723 | resolution: "elkjs@npm:0.9.3"
724 | checksum: caf544ff4fce8442d1d3dd6dface176c9b2fe26fc1e34f56122828e6eef7d2d7fe70d3202f9f3ecf0feb6287d4c8430949f483e63e450a7454bb39ccffab3808
725 | languageName: node
726 | linkType: hard
727 |
728 | "esbuild@npm:^0.21.5":
729 | version: 0.21.5
730 | resolution: "esbuild@npm:0.21.5"
731 | dependencies:
732 | "@esbuild/aix-ppc64": "npm:0.21.5"
733 | "@esbuild/android-arm": "npm:0.21.5"
734 | "@esbuild/android-arm64": "npm:0.21.5"
735 | "@esbuild/android-x64": "npm:0.21.5"
736 | "@esbuild/darwin-arm64": "npm:0.21.5"
737 | "@esbuild/darwin-x64": "npm:0.21.5"
738 | "@esbuild/freebsd-arm64": "npm:0.21.5"
739 | "@esbuild/freebsd-x64": "npm:0.21.5"
740 | "@esbuild/linux-arm": "npm:0.21.5"
741 | "@esbuild/linux-arm64": "npm:0.21.5"
742 | "@esbuild/linux-ia32": "npm:0.21.5"
743 | "@esbuild/linux-loong64": "npm:0.21.5"
744 | "@esbuild/linux-mips64el": "npm:0.21.5"
745 | "@esbuild/linux-ppc64": "npm:0.21.5"
746 | "@esbuild/linux-riscv64": "npm:0.21.5"
747 | "@esbuild/linux-s390x": "npm:0.21.5"
748 | "@esbuild/linux-x64": "npm:0.21.5"
749 | "@esbuild/netbsd-x64": "npm:0.21.5"
750 | "@esbuild/openbsd-x64": "npm:0.21.5"
751 | "@esbuild/sunos-x64": "npm:0.21.5"
752 | "@esbuild/win32-arm64": "npm:0.21.5"
753 | "@esbuild/win32-ia32": "npm:0.21.5"
754 | "@esbuild/win32-x64": "npm:0.21.5"
755 | dependenciesMeta:
756 | "@esbuild/aix-ppc64":
757 | optional: true
758 | "@esbuild/android-arm":
759 | optional: true
760 | "@esbuild/android-arm64":
761 | optional: true
762 | "@esbuild/android-x64":
763 | optional: true
764 | "@esbuild/darwin-arm64":
765 | optional: true
766 | "@esbuild/darwin-x64":
767 | optional: true
768 | "@esbuild/freebsd-arm64":
769 | optional: true
770 | "@esbuild/freebsd-x64":
771 | optional: true
772 | "@esbuild/linux-arm":
773 | optional: true
774 | "@esbuild/linux-arm64":
775 | optional: true
776 | "@esbuild/linux-ia32":
777 | optional: true
778 | "@esbuild/linux-loong64":
779 | optional: true
780 | "@esbuild/linux-mips64el":
781 | optional: true
782 | "@esbuild/linux-ppc64":
783 | optional: true
784 | "@esbuild/linux-riscv64":
785 | optional: true
786 | "@esbuild/linux-s390x":
787 | optional: true
788 | "@esbuild/linux-x64":
789 | optional: true
790 | "@esbuild/netbsd-x64":
791 | optional: true
792 | "@esbuild/openbsd-x64":
793 | optional: true
794 | "@esbuild/sunos-x64":
795 | optional: true
796 | "@esbuild/win32-arm64":
797 | optional: true
798 | "@esbuild/win32-ia32":
799 | optional: true
800 | "@esbuild/win32-x64":
801 | optional: true
802 | bin:
803 | esbuild: bin/esbuild
804 | checksum: fa08508adf683c3f399e8a014a6382a6b65542213431e26206c0720e536b31c09b50798747c2a105a4bbba1d9767b8d3615a74c2f7bf1ddf6d836cd11eb672de
805 | languageName: node
806 | linkType: hard
807 |
808 | "iconv-lite@npm:0.6":
809 | version: 0.6.3
810 | resolution: "iconv-lite@npm:0.6.3"
811 | dependencies:
812 | safer-buffer: "npm:>= 2.1.2 < 3.0.0"
813 | checksum: 98102bc66b33fcf5ac044099d1257ba0b7ad5e3ccd3221f34dd508ab4070edff183276221684e1e0555b145fce0850c9f7d2b60a9fcac50fbb4ea0d6e845a3b1
814 | languageName: node
815 | linkType: hard
816 |
817 | "internmap@npm:1 - 2":
818 | version: 2.0.3
819 | resolution: "internmap@npm:2.0.3"
820 | checksum: 8cedd57f07bbc22501516fbfc70447f0c6812871d471096fad9ea603516eacc2137b633633daf432c029712df0baefd793686388ddf5737e3ea15074b877f7ed
821 | languageName: node
822 | linkType: hard
823 |
824 | "internmap@npm:^1.0.0":
825 | version: 1.0.1
826 | resolution: "internmap@npm:1.0.1"
827 | checksum: 60942be815ca19da643b6d4f23bd0bf4e8c97abbd080fb963fe67583b60bdfb3530448ad4486bae40810e92317bded9995cc31411218acc750d72cd4e8646eee
828 | languageName: node
829 | linkType: hard
830 |
831 | "katex@npm:^0.16.9":
832 | version: 0.16.10
833 | resolution: "katex@npm:0.16.10"
834 | dependencies:
835 | commander: "npm:^8.3.0"
836 | bin:
837 | katex: cli.js
838 | checksum: b465213157e5245bbb31ff6563c33ae81807c06d6f2246325b3a2397497e8929a34eebbb262f5e0991ec00fbc0cc85f388246e6dfc38ec86c28d3e481cb70afa
839 | languageName: node
840 | linkType: hard
841 |
842 | "khroma@npm:^2.0.0":
843 | version: 2.1.0
844 | resolution: "khroma@npm:2.1.0"
845 | checksum: 634d98753ff5d2540491cafeb708fc98de0d43f4e6795256d5c8f6e3ad77de93049ea41433928fda3697adf7bbe6fe27351858f6d23b78f8b5775ef314c59891
846 | languageName: node
847 | linkType: hard
848 |
849 | "kleur@npm:^4.0.3":
850 | version: 4.1.5
851 | resolution: "kleur@npm:4.1.5"
852 | checksum: e9de6cb49657b6fa70ba2d1448fd3d691a5c4370d8f7bbf1c2f64c24d461270f2117e1b0afe8cb3114f13bbd8e51de158c2a224953960331904e636a5e4c0f2a
853 | languageName: node
854 | linkType: hard
855 |
856 | "layout-base@npm:^1.0.0":
857 | version: 1.0.2
858 | resolution: "layout-base@npm:1.0.2"
859 | checksum: 2a55d0460fd9f6ed53d7e301b9eb3dea19bda03815d616a40665ce6dc75c1f4d62e1ca19a897da1cfaf6de1b91de59cd6f2f79ba1258f3d7fccc7d46ca7f3337
860 | languageName: node
861 | linkType: hard
862 |
863 | "llmops-handbook@workspace:.":
864 | version: 0.0.0-use.local
865 | resolution: "llmops-handbook@workspace:."
866 | dependencies:
867 | "@hotwired/stimulus": "npm:^3.2.2"
868 | "@hotwired/turbo": "npm:^8.0.4"
869 | "@types/hotwired__turbo": "npm:^8"
870 | esbuild: "npm:^0.21.5"
871 | mermaid: "npm:^10.9.1"
872 | tslib: "npm:^2.6.3"
873 | typescript: "npm:^5.5.2"
874 | languageName: unknown
875 | linkType: soft
876 |
877 | "lodash-es@npm:^4.17.21":
878 | version: 4.17.21
879 | resolution: "lodash-es@npm:4.17.21"
880 | checksum: fb407355f7e6cd523a9383e76e6b455321f0f153a6c9625e21a8827d10c54c2a2341bd2ae8d034358b60e07325e1330c14c224ff582d04612a46a4f0479ff2f2
881 | languageName: node
882 | linkType: hard
883 |
884 | "mdast-util-from-markdown@npm:^1.3.0":
885 | version: 1.3.1
886 | resolution: "mdast-util-from-markdown@npm:1.3.1"
887 | dependencies:
888 | "@types/mdast": "npm:^3.0.0"
889 | "@types/unist": "npm:^2.0.0"
890 | decode-named-character-reference: "npm:^1.0.0"
891 | mdast-util-to-string: "npm:^3.1.0"
892 | micromark: "npm:^3.0.0"
893 | micromark-util-decode-numeric-character-reference: "npm:^1.0.0"
894 | micromark-util-decode-string: "npm:^1.0.0"
895 | micromark-util-normalize-identifier: "npm:^1.0.0"
896 | micromark-util-symbol: "npm:^1.0.0"
897 | micromark-util-types: "npm:^1.0.0"
898 | unist-util-stringify-position: "npm:^3.0.0"
899 | uvu: "npm:^0.5.0"
900 | checksum: f4e901bf2a2e93fe35a339e0cff581efacce2f7117cd5652e9a270847bd7e2508b3e717b7b4156af54d4f896d63033e06ff9fafbf59a1d46fe17dd5e2a3f7846
901 | languageName: node
902 | linkType: hard
903 |
904 | "mdast-util-to-string@npm:^3.1.0":
905 | version: 3.2.0
906 | resolution: "mdast-util-to-string@npm:3.2.0"
907 | dependencies:
908 | "@types/mdast": "npm:^3.0.0"
909 | checksum: 112f4bf0f6758dcb95deffdcf37afba7eaecdfe2ee13252de031723094d4d55220e147326690a8b91244758e2d678e7aeb1fdd0fa6ef3317c979bc42effd9a21
910 | languageName: node
911 | linkType: hard
912 |
913 | "mermaid@npm:^10.9.1":
914 | version: 10.9.1
915 | resolution: "mermaid@npm:10.9.1"
916 | dependencies:
917 | "@braintree/sanitize-url": "npm:^6.0.1"
918 | "@types/d3-scale": "npm:^4.0.3"
919 | "@types/d3-scale-chromatic": "npm:^3.0.0"
920 | cytoscape: "npm:^3.28.1"
921 | cytoscape-cose-bilkent: "npm:^4.1.0"
922 | d3: "npm:^7.4.0"
923 | d3-sankey: "npm:^0.12.3"
924 | dagre-d3-es: "npm:7.0.10"
925 | dayjs: "npm:^1.11.7"
926 | dompurify: "npm:^3.0.5"
927 | elkjs: "npm:^0.9.0"
928 | katex: "npm:^0.16.9"
929 | khroma: "npm:^2.0.0"
930 | lodash-es: "npm:^4.17.21"
931 | mdast-util-from-markdown: "npm:^1.3.0"
932 | non-layered-tidy-tree-layout: "npm:^2.0.2"
933 | stylis: "npm:^4.1.3"
934 | ts-dedent: "npm:^2.2.0"
935 | uuid: "npm:^9.0.0"
936 | web-worker: "npm:^1.2.0"
937 | checksum: 034f326682e3e478e4bd85e418cfef00773db4432301b858247c8d4bf813e67fa1901e8548fc490eafe4c9c215c9fb96dead73007ff317ee99973cf4f63c8791
938 | languageName: node
939 | linkType: hard
940 |
941 | "micromark-core-commonmark@npm:^1.0.1":
942 | version: 1.1.0
943 | resolution: "micromark-core-commonmark@npm:1.1.0"
944 | dependencies:
945 | decode-named-character-reference: "npm:^1.0.0"
946 | micromark-factory-destination: "npm:^1.0.0"
947 | micromark-factory-label: "npm:^1.0.0"
948 | micromark-factory-space: "npm:^1.0.0"
949 | micromark-factory-title: "npm:^1.0.0"
950 | micromark-factory-whitespace: "npm:^1.0.0"
951 | micromark-util-character: "npm:^1.0.0"
952 | micromark-util-chunked: "npm:^1.0.0"
953 | micromark-util-classify-character: "npm:^1.0.0"
954 | micromark-util-html-tag-name: "npm:^1.0.0"
955 | micromark-util-normalize-identifier: "npm:^1.0.0"
956 | micromark-util-resolve-all: "npm:^1.0.0"
957 | micromark-util-subtokenize: "npm:^1.0.0"
958 | micromark-util-symbol: "npm:^1.0.0"
959 | micromark-util-types: "npm:^1.0.1"
960 | uvu: "npm:^0.5.0"
961 | checksum: b3bf7b7004ce7dbb3ae151dcca4db1d12546f1b943affb2418da4b90b9ce59357373c433ee2eea4c868aee0791dafa355aeed19f5ef2b0acaf271f32f1ecbe6a
962 | languageName: node
963 | linkType: hard
964 |
965 | "micromark-factory-destination@npm:^1.0.0":
966 | version: 1.1.0
967 | resolution: "micromark-factory-destination@npm:1.1.0"
968 | dependencies:
969 | micromark-util-character: "npm:^1.0.0"
970 | micromark-util-symbol: "npm:^1.0.0"
971 | micromark-util-types: "npm:^1.0.0"
972 | checksum: 71ebd9089bf0c9689b98ef42215c04032ae2701ae08c3546b663628553255dca18e5310dbdacddad3acd8de4f12a789835fff30dadc4da3c4e30387a75e6b488
973 | languageName: node
974 | linkType: hard
975 |
976 | "micromark-factory-label@npm:^1.0.0":
977 | version: 1.1.0
978 | resolution: "micromark-factory-label@npm:1.1.0"
979 | dependencies:
980 | micromark-util-character: "npm:^1.0.0"
981 | micromark-util-symbol: "npm:^1.0.0"
982 | micromark-util-types: "npm:^1.0.0"
983 | uvu: "npm:^0.5.0"
984 | checksum: 5e2cd2d8214bb92a34dfcedf9c7aecf565e3648650a3a6a0495ededf15f2318dd214dc069e3026402792cd5839d395313f8ef9c2e86ca34a8facaa0f75a77753
985 | languageName: node
986 | linkType: hard
987 |
988 | "micromark-factory-space@npm:^1.0.0":
989 | version: 1.1.0
990 | resolution: "micromark-factory-space@npm:1.1.0"
991 | dependencies:
992 | micromark-util-character: "npm:^1.0.0"
993 | micromark-util-types: "npm:^1.0.0"
994 | checksum: 3da81187ce003dd4178c7adc4674052fb8befc8f1a700ae4c8227755f38581a4ae963866dc4857488d62d1dc9837606c9f2f435fa1332f62a0f1c49b83c6a822
995 | languageName: node
996 | linkType: hard
997 |
998 | "micromark-factory-title@npm:^1.0.0":
999 | version: 1.1.0
1000 | resolution: "micromark-factory-title@npm:1.1.0"
1001 | dependencies:
1002 | micromark-factory-space: "npm:^1.0.0"
1003 | micromark-util-character: "npm:^1.0.0"
1004 | micromark-util-symbol: "npm:^1.0.0"
1005 | micromark-util-types: "npm:^1.0.0"
1006 | checksum: cf8c687d1d5c3928846a4791d4a7e2f1d7bdd2397051e20d60f06b7565a48bf85198ab6f85735e997ab3f0cbb80b8b6391f4f7ebc0aae2f2f8c3a08541257bf6
1007 | languageName: node
1008 | linkType: hard
1009 |
1010 | "micromark-factory-whitespace@npm:^1.0.0":
1011 | version: 1.1.0
1012 | resolution: "micromark-factory-whitespace@npm:1.1.0"
1013 | dependencies:
1014 | micromark-factory-space: "npm:^1.0.0"
1015 | micromark-util-character: "npm:^1.0.0"
1016 | micromark-util-symbol: "npm:^1.0.0"
1017 | micromark-util-types: "npm:^1.0.0"
1018 | checksum: 7248cc4534f9befb38c6f398b6e38efd3199f1428fc214c9cb7ed5b6e9fa7a82c0d8cdfa9bcacde62887c9a7c8c46baf5c318b2ae8f701afbccc8ad702e92dce
1019 | languageName: node
1020 | linkType: hard
1021 |
1022 | "micromark-util-character@npm:^1.0.0":
1023 | version: 1.2.0
1024 | resolution: "micromark-util-character@npm:1.2.0"
1025 | dependencies:
1026 | micromark-util-symbol: "npm:^1.0.0"
1027 | micromark-util-types: "npm:^1.0.0"
1028 | checksum: 3390a675a50731b58a8e5493cd802e190427f10fa782079b455b00f6b54e406e36882df7d4a3bd32b709f7a2c3735b4912597ebc1c0a99566a8d8d0b816e2cd4
1029 | languageName: node
1030 | linkType: hard
1031 |
1032 | "micromark-util-chunked@npm:^1.0.0":
1033 | version: 1.1.0
1034 | resolution: "micromark-util-chunked@npm:1.1.0"
1035 | dependencies:
1036 | micromark-util-symbol: "npm:^1.0.0"
1037 | checksum: 59534cf4aaf481ed58d65478d00eae0080df9b5816673f79b5ddb0cea263e5a9ee9cbb6cc565daf1eb3c8c4ff86fc4e25d38a0577539655cda823a4249efd358
1038 | languageName: node
1039 | linkType: hard
1040 |
1041 | "micromark-util-classify-character@npm:^1.0.0":
1042 | version: 1.1.0
1043 | resolution: "micromark-util-classify-character@npm:1.1.0"
1044 | dependencies:
1045 | micromark-util-character: "npm:^1.0.0"
1046 | micromark-util-symbol: "npm:^1.0.0"
1047 | micromark-util-types: "npm:^1.0.0"
1048 | checksum: 3266453dc0fdaf584e24c9b3c91d1ed180f76b5856699c51fd2549305814fcab7ec52afb4d3e83d002a9115cd2d2b2ffdc9c0b38ed85120822bf515cc00636ec
1049 | languageName: node
1050 | linkType: hard
1051 |
1052 | "micromark-util-combine-extensions@npm:^1.0.0":
1053 | version: 1.1.0
1054 | resolution: "micromark-util-combine-extensions@npm:1.1.0"
1055 | dependencies:
1056 | micromark-util-chunked: "npm:^1.0.0"
1057 | micromark-util-types: "npm:^1.0.0"
1058 | checksum: 0bc572fab3fe77f533c29aa1b75cb847b9fc9455f67a98623ef9740b925c0b0426ad9f09bbb56f1e844ea9ebada7873d1f06d27f7c979a917692b273c4b69e31
1059 | languageName: node
1060 | linkType: hard
1061 |
1062 | "micromark-util-decode-numeric-character-reference@npm:^1.0.0":
1063 | version: 1.1.0
1064 | resolution: "micromark-util-decode-numeric-character-reference@npm:1.1.0"
1065 | dependencies:
1066 | micromark-util-symbol: "npm:^1.0.0"
1067 | checksum: 64ef2575e3fc2426976c19e16973348f20b59ddd5543f1467ac2e251f29e0a91f12089703d29ae985b0b9a408ee0d72f06d04ed3920811aa2402aabca3bdf9e4
1068 | languageName: node
1069 | linkType: hard
1070 |
1071 | "micromark-util-decode-string@npm:^1.0.0":
1072 | version: 1.1.0
1073 | resolution: "micromark-util-decode-string@npm:1.1.0"
1074 | dependencies:
1075 | decode-named-character-reference: "npm:^1.0.0"
1076 | micromark-util-character: "npm:^1.0.0"
1077 | micromark-util-decode-numeric-character-reference: "npm:^1.0.0"
1078 | micromark-util-symbol: "npm:^1.0.0"
1079 | checksum: 757a0aaa5ad6c50c7480bd75371d407ac75f5022cd4404aba07adadf1448189502aea9bb7b2d09d25e18745e0abf72b95506b6beb184bcccabe919e48e3a5df7
1080 | languageName: node
1081 | linkType: hard
1082 |
1083 | "micromark-util-encode@npm:^1.0.0":
1084 | version: 1.1.0
1085 | resolution: "micromark-util-encode@npm:1.1.0"
1086 | checksum: 9878c9bc96999d45626a7597fffac85348ea842dce75d2417345cbf070a9941c62477bd0963bef37d4f0fd29f2982be6ddf416d62806f00ccb334af9d6ee87e7
1087 | languageName: node
1088 | linkType: hard
1089 |
1090 | "micromark-util-html-tag-name@npm:^1.0.0":
1091 | version: 1.2.0
1092 | resolution: "micromark-util-html-tag-name@npm:1.2.0"
1093 | checksum: 15421869678d36b4fe51df453921e8186bff514a14e9f79f32b7e1cdd67874e22a66ad34a7f048dd132cbbbfc7c382ae2f777a2bfd1f245a47705dc1c6d4f199
1094 | languageName: node
1095 | linkType: hard
1096 |
1097 | "micromark-util-normalize-identifier@npm:^1.0.0":
1098 | version: 1.1.0
1099 | resolution: "micromark-util-normalize-identifier@npm:1.1.0"
1100 | dependencies:
1101 | micromark-util-symbol: "npm:^1.0.0"
1102 | checksum: a9657321a2392584e4d978061882117a84db7d2c2c1c052c0f5d25da089d463edb9f956d5beaf7f5768984b6f72d046d59b5972951ec7bf25397687a62b8278a
1103 | languageName: node
1104 | linkType: hard
1105 |
1106 | "micromark-util-resolve-all@npm:^1.0.0":
1107 | version: 1.1.0
1108 | resolution: "micromark-util-resolve-all@npm:1.1.0"
1109 | dependencies:
1110 | micromark-util-types: "npm:^1.0.0"
1111 | checksum: b5c95484c06e87bbbb60d8430eb030a458733a5270409f4c67892d1274737087ca6a7ca888987430e57cf1dcd44bb16390d3b3936a2bf07f7534ec8f52ce43c9
1112 | languageName: node
1113 | linkType: hard
1114 |
1115 | "micromark-util-sanitize-uri@npm:^1.0.0":
1116 | version: 1.2.0
1117 | resolution: "micromark-util-sanitize-uri@npm:1.2.0"
1118 | dependencies:
1119 | micromark-util-character: "npm:^1.0.0"
1120 | micromark-util-encode: "npm:^1.0.0"
1121 | micromark-util-symbol: "npm:^1.0.0"
1122 | checksum: dbdb98248e9f0408c7a00f1c1cd805775b41d213defd659533835f34b38da38e8f990bf7b3f782e96bffbc549aec9c3ecdab197d4ad5adbfe08f814a70327b6e
1123 | languageName: node
1124 | linkType: hard
1125 |
1126 | "micromark-util-subtokenize@npm:^1.0.0":
1127 | version: 1.1.0
1128 | resolution: "micromark-util-subtokenize@npm:1.1.0"
1129 | dependencies:
1130 | micromark-util-chunked: "npm:^1.0.0"
1131 | micromark-util-symbol: "npm:^1.0.0"
1132 | micromark-util-types: "npm:^1.0.0"
1133 | uvu: "npm:^0.5.0"
1134 | checksum: f292b1b162845db50d36255c9d4c4c6d47931fbca3ac98a80c7e536d2163233fd662f8ca0479ee2b80f145c66a1394c7ed17dfce801439741211015e77e3901e
1135 | languageName: node
1136 | linkType: hard
1137 |
1138 | "micromark-util-symbol@npm:^1.0.0":
1139 | version: 1.1.0
1140 | resolution: "micromark-util-symbol@npm:1.1.0"
1141 | checksum: 10ceaed33a90e6bfd3a5d57053dbb53f437d4809cc11430b5a09479c0ba601577059be9286df4a7eae6e350a60a2575dc9fa9d9872b5b8d058c875e075c33803
1142 | languageName: node
1143 | linkType: hard
1144 |
1145 | "micromark-util-types@npm:^1.0.0, micromark-util-types@npm:^1.0.1":
1146 | version: 1.1.0
1147 | resolution: "micromark-util-types@npm:1.1.0"
1148 | checksum: a9749cb0a12a252ff536baabcb7012421b6fad4d91a5fdd80d7b33dc7b4c22e2d0c4637dfe5b902d00247fe6c9b01f4a24fce6b572b16ccaa4da90e6ce2a11e4
1149 | languageName: node
1150 | linkType: hard
1151 |
1152 | "micromark@npm:^3.0.0":
1153 | version: 3.2.0
1154 | resolution: "micromark@npm:3.2.0"
1155 | dependencies:
1156 | "@types/debug": "npm:^4.0.0"
1157 | debug: "npm:^4.0.0"
1158 | decode-named-character-reference: "npm:^1.0.0"
1159 | micromark-core-commonmark: "npm:^1.0.1"
1160 | micromark-factory-space: "npm:^1.0.0"
1161 | micromark-util-character: "npm:^1.0.0"
1162 | micromark-util-chunked: "npm:^1.0.0"
1163 | micromark-util-combine-extensions: "npm:^1.0.0"
1164 | micromark-util-decode-numeric-character-reference: "npm:^1.0.0"
1165 | micromark-util-encode: "npm:^1.0.0"
1166 | micromark-util-normalize-identifier: "npm:^1.0.0"
1167 | micromark-util-resolve-all: "npm:^1.0.0"
1168 | micromark-util-sanitize-uri: "npm:^1.0.0"
1169 | micromark-util-subtokenize: "npm:^1.0.0"
1170 | micromark-util-symbol: "npm:^1.0.0"
1171 | micromark-util-types: "npm:^1.0.1"
1172 | uvu: "npm:^0.5.0"
1173 | checksum: f243e805d1b3cc699fddae2de0b1492bc82462f1a709d7ae5c82039f88b1e009c959100184717e748be057b5f88603289d5681679a4e6fbabcd037beb34bc744
1174 | languageName: node
1175 | linkType: hard
1176 |
1177 | "mri@npm:^1.1.0":
1178 | version: 1.2.0
1179 | resolution: "mri@npm:1.2.0"
1180 | checksum: a3d32379c2554cf7351db6237ddc18dc9e54e4214953f3da105b97dc3babe0deb3ffe99cf409b38ea47cc29f9430561ba6b53b24ab8f9ce97a4b50409e4a50e7
1181 | languageName: node
1182 | linkType: hard
1183 |
1184 | "ms@npm:2.1.2":
1185 | version: 2.1.2
1186 | resolution: "ms@npm:2.1.2"
1187 | checksum: a437714e2f90dbf881b5191d35a6db792efbca5badf112f87b9e1c712aace4b4b9b742dd6537f3edf90fd6f684de897cec230abde57e87883766712ddda297cc
1188 | languageName: node
1189 | linkType: hard
1190 |
1191 | "non-layered-tidy-tree-layout@npm:^2.0.2":
1192 | version: 2.0.2
1193 | resolution: "non-layered-tidy-tree-layout@npm:2.0.2"
1194 | checksum: 73856e9959667193e733a7ef2b06a69421f4d9d7428a3982ce39763cd979a04eed0007f2afb3414afa3f6dc4dc6b5c850c2af9aa71a974475236a465093ec9c7
1195 | languageName: node
1196 | linkType: hard
1197 |
1198 | "robust-predicates@npm:^3.0.2":
1199 | version: 3.0.2
1200 | resolution: "robust-predicates@npm:3.0.2"
1201 | checksum: 4ecd53649f1c2d49529c85518f2fa69ffb2f7a4453f7fd19c042421c7b4d76c3efb48bc1c740c8f7049346d7cb58cf08ee0c9adaae595cc23564d360adb1fde4
1202 | languageName: node
1203 | linkType: hard
1204 |
1205 | "rw@npm:1":
1206 | version: 1.3.3
1207 | resolution: "rw@npm:1.3.3"
1208 | checksum: b1e1ef37d1e79d9dc7050787866e30b6ddcb2625149276045c262c6b4d53075ddc35f387a856a8e76f0d0df59f4cd58fe24707e40797ebee66e542b840ed6a53
1209 | languageName: node
1210 | linkType: hard
1211 |
1212 | "sade@npm:^1.7.3":
1213 | version: 1.8.1
1214 | resolution: "sade@npm:1.8.1"
1215 | dependencies:
1216 | mri: "npm:^1.1.0"
1217 | checksum: da8a3a5d667ad5ce3bf6d4f054bbb9f711103e5df21003c5a5c1a8a77ce12b640ed4017dd423b13c2307ea7e645adee7c2ae3afe8051b9db16a6f6d3da3f90b1
1218 | languageName: node
1219 | linkType: hard
1220 |
1221 | "safer-buffer@npm:>= 2.1.2 < 3.0.0":
1222 | version: 2.1.2
1223 | resolution: "safer-buffer@npm:2.1.2"
1224 | checksum: 7e3c8b2e88a1841c9671094bbaeebd94448111dd90a81a1f606f3f67708a6ec57763b3b47f06da09fc6054193e0e6709e77325415dc8422b04497a8070fa02d4
1225 | languageName: node
1226 | linkType: hard
1227 |
1228 | "stylis@npm:^4.1.3":
1229 | version: 4.3.2
1230 | resolution: "stylis@npm:4.3.2"
1231 | checksum: 0410e1404cbeee3388a9e17587875211ce2f014c8379af0d1e24ca55878867c9f1ccc7b0ce9a156ca53f5d6e301391a82b0645522a604674a378b3189a4a1994
1232 | languageName: node
1233 | linkType: hard
1234 |
1235 | "ts-dedent@npm:^2.2.0":
1236 | version: 2.2.0
1237 | resolution: "ts-dedent@npm:2.2.0"
1238 | checksum: 175adea838468cc2ff7d5e97f970dcb798bbcb623f29c6088cb21aa2880d207c5784be81ab1741f56b9ac37840cbaba0c0d79f7f8b67ffe61c02634cafa5c303
1239 | languageName: node
1240 | linkType: hard
1241 |
1242 | "tslib@npm:^2.6.3":
1243 | version: 2.6.3
1244 | resolution: "tslib@npm:2.6.3"
1245 | checksum: 2598aef53d9dbe711af75522464b2104724d6467b26a60f2bdac8297d2b5f1f6b86a71f61717384aa8fd897240467aaa7bcc36a0700a0faf751293d1331db39a
1246 | languageName: node
1247 | linkType: hard
1248 |
1249 | "typescript@npm:^5.5.2":
1250 | version: 5.5.2
1251 | resolution: "typescript@npm:5.5.2"
1252 | bin:
1253 | tsc: bin/tsc
1254 | tsserver: bin/tsserver
1255 | checksum: 8ca39b27b5f9bd7f32db795045933ab5247897660627251e8254180b792a395bf061ea7231947d5d7ffa5cb4cc771970fd4ef543275f9b559f08c9325cccfce3
1256 | languageName: node
1257 | linkType: hard
1258 |
1259 | "typescript@patch:typescript@npm%3A^5.5.2#optional!builtin":
1260 | version: 5.5.2
1261 | resolution: "typescript@patch:typescript@npm%3A5.5.2#optional!builtin::version=5.5.2&hash=e012d7"
1262 | bin:
1263 | tsc: bin/tsc
1264 | tsserver: bin/tsserver
1265 | checksum: 6721ac8933a70c252d7b640b345792e103d881811ff660355617c1836526dbb71c2044e2e77a8823fb3570b469f33276875a4cab6d3c4de4ae7d7ee1c3074ae4
1266 | languageName: node
1267 | linkType: hard
1268 |
1269 | "unist-util-stringify-position@npm:^3.0.0":
1270 | version: 3.0.3
1271 | resolution: "unist-util-stringify-position@npm:3.0.3"
1272 | dependencies:
1273 | "@types/unist": "npm:^2.0.0"
1274 | checksum: 14550027825230528f6437dad7f2579a841780318569851291be6c8a970bae6f65a7feb24dabbcfce0e5e68cacae85bf12cbda3f360f7c873b4db602bdf7bb21
1275 | languageName: node
1276 | linkType: hard
1277 |
1278 | "uuid@npm:^9.0.0":
1279 | version: 9.0.1
1280 | resolution: "uuid@npm:9.0.1"
1281 | bin:
1282 | uuid: dist/bin/uuid
1283 | checksum: 1607dd32ac7fc22f2d8f77051e6a64845c9bce5cd3dd8aa0070c074ec73e666a1f63c7b4e0f4bf2bc8b9d59dc85a15e17807446d9d2b17c8485fbc2147b27f9b
1284 | languageName: node
1285 | linkType: hard
1286 |
1287 | "uvu@npm:^0.5.0":
1288 | version: 0.5.6
1289 | resolution: "uvu@npm:0.5.6"
1290 | dependencies:
1291 | dequal: "npm:^2.0.0"
1292 | diff: "npm:^5.0.0"
1293 | kleur: "npm:^4.0.3"
1294 | sade: "npm:^1.7.3"
1295 | bin:
1296 | uvu: bin.js
1297 | checksum: ad32eb5f7d94bdeb71f80d073003f0138e24f61ed68cecc8e15d2f30838f44c9670577bb1775c8fac894bf93d1bc1583d470a9195e49bfa6efa14cc6f4942bff
1298 | languageName: node
1299 | linkType: hard
1300 |
1301 | "web-worker@npm:^1.2.0":
1302 | version: 1.3.0
1303 | resolution: "web-worker@npm:1.3.0"
1304 | checksum: bca341b421f07c2d33aa205d463e6a2d3d376fb0628a01052dc343fd88a1d688df58d1c7fe36f631d0d860bbd3060f5014cca67d6f8781634b6c2fae25d1fc70
1305 | languageName: node
1306 | linkType: hard
1307 |
--------------------------------------------------------------------------------