From 7ef1cd8a6717efa9e2d8851393b73e556c75cecc Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:05:22 -0500 Subject: [PATCH 01/26] clean up docs --- docs/docs/quickstarts/congratulations.md | 23 --- docs/docs/quickstarts/create-a-blog-post.md | 34 ----- docs/docs/quickstarts/create-a-document.md | 57 -------- docs/docs/quickstarts/create-a-page.md | 0 docs/docs/quickstarts/deploy-your-site.md | 31 ---- docs/docs/quickstarts/markdown-features.mdx | 152 -------------------- 6 files changed, 297 deletions(-) delete mode 100644 docs/docs/quickstarts/congratulations.md delete mode 100644 docs/docs/quickstarts/create-a-blog-post.md delete mode 100644 docs/docs/quickstarts/create-a-document.md delete mode 100644 docs/docs/quickstarts/create-a-page.md delete mode 100644 docs/docs/quickstarts/deploy-your-site.md delete mode 100644 docs/docs/quickstarts/markdown-features.mdx diff --git a/docs/docs/quickstarts/congratulations.md b/docs/docs/quickstarts/congratulations.md deleted file mode 100644 index 04771a0..0000000 --- a/docs/docs/quickstarts/congratulations.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -sidebar_position: 6 ---- - -# Congratulations! - -You have just learned the **basics of Docusaurus** and made some changes to the **initial template**. - -Docusaurus has **much more to offer**! - -Have **5 more minutes**? Take a look at **[versioning](../tutorial-extras/manage-docs-versions.md)** and **[i18n](../tutorial-extras/translate-your-site.md)**. - -Anything **unclear** or **buggy** in this tutorial? [Please report it!](https://github.com/facebook/docusaurus/discussions/4610) - -## What's next? - -- Read the [official documentation](https://docusaurus.io/) -- Modify your site configuration with [`docusaurus.config.js`](https://docusaurus.io/docs/api/docusaurus-config) -- Add navbar and footer items with [`themeConfig`](https://docusaurus.io/docs/api/themes/configuration) -- Add a custom [Design and Layout](https://docusaurus.io/docs/styling-layout) -- Add a [search bar](https://docusaurus.io/docs/search) -- Find inspirations in the [Docusaurus showcase](https://docusaurus.io/showcase) -- Get involved in the [Docusaurus Community](https://docusaurus.io/community/support) diff --git a/docs/docs/quickstarts/create-a-blog-post.md b/docs/docs/quickstarts/create-a-blog-post.md deleted file mode 100644 index 550ae17..0000000 --- a/docs/docs/quickstarts/create-a-blog-post.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -sidebar_position: 3 ---- - -# Create a Blog Post - -Docusaurus creates a **page for each blog post**, but also a **blog index page**, a **tag system**, an **RSS** feed... - -## Create your first Post - -Create a file at `blog/2021-02-28-greetings.md`: - -```md title="blog/2021-02-28-greetings.md" ---- -slug: greetings -title: Greetings! -authors: - - name: Joel Marcey - title: Co-creator of Docusaurus 1 - url: https://github.com/JoelMarcey - image_url: https://github.com/JoelMarcey.png - - name: Sébastien Lorber - title: Docusaurus maintainer - url: https://sebastienlorber.com - image_url: https://github.com/slorber.png -tags: [greetings] ---- - -Congratulations, you have made your first post! - -Feel free to play around and edit this post as much as you like. -``` - -A new blog post is now available at [http://localhost:3000/blog/greetings](http://localhost:3000/blog/greetings). 
diff --git a/docs/docs/quickstarts/create-a-document.md b/docs/docs/quickstarts/create-a-document.md deleted file mode 100644 index 28b0262..0000000 --- a/docs/docs/quickstarts/create-a-document.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -sidebar_position: 2 ---- - -# Create a Document - -Documents are **groups of pages** connected through: - -- a **sidebar** -- **previous/next navigation** -- **versioning** - -## Create your first Doc - -Create a Markdown file at `docs/hello.md`: - -```md title="docs/hello.md" -# Hello - -This is my **first Docusaurus document**! -``` - -A new document is now available at [http://localhost:3000/docs/hello](http://localhost:3000/docs/hello). - -## Configure the Sidebar - -Docusaurus automatically **creates a sidebar** from the `docs` folder. - -Add metadata to customize the sidebar label and position: - -```md title="docs/hello.md" {1-4} ---- -sidebar_label: 'Hi!' -sidebar_position: 3 ---- - -# Hello - -This is my **first Docusaurus document**! -``` - -It is also possible to create your sidebar explicitly in `sidebars.js`: - -```js title="sidebars.js" -export default { - tutorialSidebar: [ - 'intro', - // highlight-next-line - 'hello', - { - type: 'category', - label: 'Tutorial', - items: ['quickstarts/create-a-document'], - }, - ], -}; -``` diff --git a/docs/docs/quickstarts/create-a-page.md b/docs/docs/quickstarts/create-a-page.md deleted file mode 100644 index e69de29..0000000 diff --git a/docs/docs/quickstarts/deploy-your-site.md b/docs/docs/quickstarts/deploy-your-site.md deleted file mode 100644 index 1c50ee0..0000000 --- a/docs/docs/quickstarts/deploy-your-site.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -sidebar_position: 5 ---- - -# Deploy your site - -Docusaurus is a **static-site-generator** (also called **[Jamstack](https://jamstack.org/)**). - -It builds your site as simple **static HTML, JavaScript and CSS files**. - -## Build your site - -Build your site **for production**: - -```bash -npm run build -``` - -The static files are generated in the `build` folder. - -## Deploy your site - -Test your production build locally: - -```bash -npm run serve -``` - -The `build` folder is now served at [http://localhost:3000/](http://localhost:3000/). - -You can now deploy the `build` folder **almost anywhere** easily, **for free** or very small cost (read the **[Deployment Guide](https://docusaurus.io/docs/deployment)**). diff --git a/docs/docs/quickstarts/markdown-features.mdx b/docs/docs/quickstarts/markdown-features.mdx deleted file mode 100644 index 1f42792..0000000 --- a/docs/docs/quickstarts/markdown-features.mdx +++ /dev/null @@ -1,152 +0,0 @@ ---- -sidebar_position: 4 ---- - -# Markdown Features - -Docusaurus supports **[Markdown](https://daringfireball.net/projects/markdown/syntax)** and a few **additional features**. - -## Front Matter - -Markdown documents have metadata at the top called [Front Matter](https://jekyllrb.com/docs/front-matter/): - -```text title="my-doc.md" -// highlight-start ---- -id: my-doc-id -title: My document title -description: My document description -slug: /my-custom-url ---- -// highlight-end - -## Markdown heading - -Markdown text with [links](./hello.md) -``` - -## Links - -Regular Markdown links are supported, using url paths or relative file paths. - -```md -Let's see how to [Create a page](/recommended). -``` - -```md -Let's see how to [Create a page](./recommended.md). -``` - -**Result:** Let's see how to [Create a page](./recommended.md). - -## Images - -Regular Markdown images are supported. 
- -You can use absolute paths to reference images in the static directory (`static/img/docusaurus.png`): - -```md -![Docusaurus logo](/img/docusaurus.png) -``` - -![Docusaurus logo](/img/docusaurus.png) - -You can reference images relative to the current file as well. This is particularly useful to colocate images close to the Markdown files using them: - -```md -![Docusaurus logo](./img/docusaurus.png) -``` - -## Code Blocks - -Markdown code blocks are supported with Syntax highlighting. - -````md -```jsx title="src/components/HelloDocusaurus.js" -function HelloDocusaurus() { - return

<h1>Hello, Docusaurus!</h1>;
-}
-```
-````
-
-```jsx title="src/components/HelloDocusaurus.js"
-function HelloDocusaurus() {
-  return <h1>Hello, Docusaurus!</h1>
; -} -``` - -## Admonitions - -Docusaurus has a special syntax to create admonitions and callouts: - -```md -:::tip My tip - -Use this awesome feature option - -::: - -:::danger Take care - -This action is dangerous - -::: -``` - -:::tip My tip - -Use this awesome feature option - -::: - -:::danger Take care - -This action is dangerous - -::: - -## MDX and React Components - -[MDX](https://mdxjs.com/) can make your documentation more **interactive** and allows using any **React components inside Markdown**: - -```jsx -export const Highlight = ({children, color}) => ( - { - alert(`You clicked the color ${color} with label ${children}`) - }}> - {children} - -); - -This is Docusaurus green ! - -This is Facebook blue ! -``` - -export const Highlight = ({children, color}) => ( - { - alert(`You clicked the color ${color} with label ${children}`); - }}> - {children} - -); - -This is Docusaurus green ! - -This is Facebook blue ! From a805d1f0ad8bd09f13f2289fc372d80aac5e2329 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:10:41 -0500 Subject: [PATCH 02/26] tabs --- docs/docs/quickstarts/installation.md | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/docs/docs/quickstarts/installation.md b/docs/docs/quickstarts/installation.md index 46d13d0..65c5ea8 100644 --- a/docs/docs/quickstarts/installation.md +++ b/docs/docs/quickstarts/installation.md @@ -1,13 +1,32 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + # Installation Create a new Python 3.12 virtual environment and install RA.Aid: ```bash uv venv -p 3.12 -source .venv/bin/activate # On Unix/macOS -# or -.venv\Scripts\activate # On Windows +``` + + + +```bash +source .venv/bin/activate +``` + + + + +```bash +.venv\Scripts\activate +``` + + + + +```bash uv pip install ra-aid ``` From 35cd07c324018cb2495e657d1e4cf6f64da5d609 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:14:37 -0500 Subject: [PATCH 03/26] uv install --- docs/docs/quickstarts/installation.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/docs/quickstarts/installation.md b/docs/docs/quickstarts/installation.md index 65c5ea8..bc84efd 100644 --- a/docs/docs/quickstarts/installation.md +++ b/docs/docs/quickstarts/installation.md @@ -5,6 +5,8 @@ import TabItem from '@theme/TabItem'; Create a new Python 3.12 virtual environment and install RA.Aid: +First install [uv](https://docs.astral.sh/uv/getting-started/installation/), then: + ```bash uv venv -p 3.12 ``` From dbf3d83523f55aa922c15899bbf917ea27f8bb29 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:20:02 -0500 Subject: [PATCH 04/26] installation docs --- docs/docs/quickstarts/installation.md | 29 +++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/docs/docs/quickstarts/installation.md b/docs/docs/quickstarts/installation.md index bc84efd..314f540 100644 --- a/docs/docs/quickstarts/installation.md +++ b/docs/docs/quickstarts/installation.md @@ -3,6 +3,9 @@ import TabItem from '@theme/TabItem'; # Installation + + + Create a new Python 3.12 virtual environment and install RA.Aid: First install [uv](https://docs.astral.sh/uv/getting-started/installation/), then: @@ -32,4 +35,30 @@ source .venv/bin/activate uv pip install ra-aid ``` + + + +Install RA.Aid using pip: + +```bash +pip install ra-aid +``` + +:::note +If you're using Python 3.13 or newer, we recommend using the UV installation method instead due to compatibility issues with newer Python versions. 
+::: + + + + +Install RA.Aid using Homebrew: + +```bash +brew tap ai-christianson/homebrew-ra-aid +brew install ra-aid +``` + + + + Once installed, see the [Recommended Configuration](recommended) to set up RA.Aid with the recommended settings. From b8021519a6650f5a4ed714b767f27f854f00cddf Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:22:26 -0500 Subject: [PATCH 05/26] installation docs --- docs/docs/quickstarts/installation.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/docs/quickstarts/installation.md b/docs/docs/quickstarts/installation.md index 314f540..027b481 100644 --- a/docs/docs/quickstarts/installation.md +++ b/docs/docs/quickstarts/installation.md @@ -3,6 +3,8 @@ import TabItem from '@theme/TabItem'; # Installation +Getting started is easy! You can install RA.Aid using any of these three methods - pick the one that works best for you: + From 1060117871e3080372c8b4b0905170327b79d07d Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:28:57 -0500 Subject: [PATCH 06/26] docs/intro --- docs/docs/intro.md | 63 ++++++++++++++++++++++++++-------------------- 1 file changed, 36 insertions(+), 27 deletions(-) diff --git a/docs/docs/intro.md b/docs/docs/intro.md index acd2ad2..057f01f 100644 --- a/docs/docs/intro.md +++ b/docs/docs/intro.md @@ -3,46 +3,55 @@ sidebar_position: 1 slug: / --- -# Intro +# Welcome to RA.Aid -Let's discover **Docusaurus in less than 5 minutes**. +RA.Aid (pronounced "raid") is your AI-powered development companion that helps you build software autonomously. Whether you're working on new features, refactoring code, or researching solutions, RA.Aid makes development faster and more efficient. -## Getting Started +## Why RA.Aid? -Get started by **creating a new site**. +- 🤖 **Autonomous Development**: Let RA.Aid handle complex programming tasks while you focus on the big picture +- 🔍 **Smart Research**: Automatically researches solutions and best practices +- 📋 **Intelligent Planning**: Breaks down complex tasks into manageable steps +- 💬 **Interactive Mode**: Get help when you need it through natural conversation -Or **try Docusaurus immediately** with **[docusaurus.new](https://docusaurus.new)**. +## Quick Start -### What you'll need +Ready to get started? Jump right to: -- [Node.js](https://nodejs.org/en/download/) version 18.0 or above: - - When installing Node.js, you are recommended to check all checkboxes related to dependencies. +- [Installation Guide](/installation) +- [Configuration Guide](/configuration) +- [Basic Usage Examples](/usage) -## Generate a new site +### Basic Example -Generate a new Docusaurus site using the **classic template**. - -The classic template will automatically be added to your project after you run the command: +Here's how simple it is to use RA.Aid: ```bash -npm init docusaurus@latest my-website classic +# Install RA.Aid +pip install ra-aid + +# Set up API keys +export ANTHROPIC_API_KEY=your_key_here +export OPENAI_API_KEY=your_key_here +export TAVILY_API_KEY=your_key_here + +# Start using it +ra-aid -m "Add input validation to the login form" ``` -You can type this command into Command Prompt, Powershell, Terminal, or any other integrated terminal of your code editor. +## Key Features -The command also installs all necessary dependencies you need to run Docusaurus. 
+- **Three-Stage Workflow**: Research → Planning → Implementation +- **Web Research**: Automatically searches for best practices and solutions +- **Interactive Mode**: Get help when you need it through natural conversation +- **Multiple AI Providers**: Support for various AI models to suit your needs +- **Git Integration**: Works seamlessly with your version control -## Start your site +## Next Steps -Run the development server: +- Check out the [Installation Guide](/installation) to set up RA.Aid +- Learn about [Configuration](/configuration) options +- See [Usage Examples](/usage) to get started quickly +- Join our [Discord Community](https://discord.gg/f6wYbzHYxV) for help and discussions -```bash -cd my-website -npm run start -``` - -The `cd` command changes the directory you're working with. In order to work with your newly created Docusaurus site, you'll need to navigate the terminal there. - -The `npm run start` command builds your website locally and serves it through a development server, ready for you to view at http://localhost:3000/. - -Open `docs/intro.md` (this page) and edit some lines: the site **reloads automatically** and displays your changes. +Ready to revolutionize your development workflow? Let's get started! 🚀 From 3136e100fe6e27f50700923f0bca303635d9d415 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:29:59 -0500 Subject: [PATCH 07/26] docs cleanup --- docs/docs/getting-started.md | 1 - 1 file changed, 1 deletion(-) delete mode 100644 docs/docs/getting-started.md diff --git a/docs/docs/getting-started.md b/docs/docs/getting-started.md deleted file mode 100644 index 31f793a..0000000 --- a/docs/docs/getting-started.md +++ /dev/null @@ -1 +0,0 @@ ----\nsidebar_position: 1\n---\n\n# Getting Started\n\nWelcome to the documentation. This page will help you get started. 
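Read together, the installation and intro pages added in the patches above describe a single end-to-end flow. A minimal sketch of that flow on Unix/macOS, assuming the uv-based install method and the placeholder API keys used in those pages, looks like this:

```bash
# Create and activate a Python 3.12 virtual environment (uv method from installation.md)
uv venv -p 3.12
source .venv/bin/activate

# Install RA.Aid into the environment
uv pip install ra-aid

# Provide the API keys the intro page expects (placeholders, not real keys)
export ANTHROPIC_API_KEY=your_key_here
export OPENAI_API_KEY=your_key_here
export TAVILY_API_KEY=your_key_here

# First run, using the example task from the intro page
ra-aid -m "Add input validation to the login form"
```

On Windows the activation step becomes `.venv\Scripts\activate`, as shown in the installation page.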
From d399c34640c449663daac339c9a5f150ec2cd702 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:39:15 -0500 Subject: [PATCH 08/26] docs cleanup --- docs/docs/{quickstarts => quickstart}/_category_.json | 0 docs/docs/{quickstarts => quickstart}/installation.md | 0 docs/docs/{quickstarts => quickstart}/recommended.md | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename docs/docs/{quickstarts => quickstart}/_category_.json (100%) rename docs/docs/{quickstarts => quickstart}/installation.md (100%) rename docs/docs/{quickstarts => quickstart}/recommended.md (100%) diff --git a/docs/docs/quickstarts/_category_.json b/docs/docs/quickstart/_category_.json similarity index 100% rename from docs/docs/quickstarts/_category_.json rename to docs/docs/quickstart/_category_.json diff --git a/docs/docs/quickstarts/installation.md b/docs/docs/quickstart/installation.md similarity index 100% rename from docs/docs/quickstarts/installation.md rename to docs/docs/quickstart/installation.md diff --git a/docs/docs/quickstarts/recommended.md b/docs/docs/quickstart/recommended.md similarity index 100% rename from docs/docs/quickstarts/recommended.md rename to docs/docs/quickstart/recommended.md From 7e3f2e12604ff9b6c21faa53b202ac64744d729c Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:39:20 -0500 Subject: [PATCH 09/26] docs cleanup --- docs/docs/quickstart/_category_.json | 2 +- docs/docs/tutorial-extras/_category_.json | 7 -- .../img/docsVersionDropdown.png | Bin 25427 -> 0 bytes .../tutorial-extras/img/localeDropdown.png | Bin 27841 -> 0 bytes .../tutorial-extras/manage-docs-versions.md | 55 ----------- .../tutorial-extras/translate-your-site.md | 88 ------------------ 6 files changed, 1 insertion(+), 151 deletions(-) delete mode 100644 docs/docs/tutorial-extras/_category_.json delete mode 100644 docs/docs/tutorial-extras/img/docsVersionDropdown.png delete mode 100644 docs/docs/tutorial-extras/img/localeDropdown.png delete mode 100644 docs/docs/tutorial-extras/manage-docs-versions.md delete mode 100644 docs/docs/tutorial-extras/translate-your-site.md diff --git a/docs/docs/quickstart/_category_.json b/docs/docs/quickstart/_category_.json index 4e464dc..7a04160 100644 --- a/docs/docs/quickstart/_category_.json +++ b/docs/docs/quickstart/_category_.json @@ -1,5 +1,5 @@ { - "label": "Quick Starts", + "label": "Quick Start", "position": 2, "link": { "type": "generated-index", diff --git a/docs/docs/tutorial-extras/_category_.json b/docs/docs/tutorial-extras/_category_.json deleted file mode 100644 index a8ffcc1..0000000 --- a/docs/docs/tutorial-extras/_category_.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "label": "Tutorial - Extras", - "position": 3, - "link": { - "type": "generated-index" - } -} diff --git a/docs/docs/tutorial-extras/img/docsVersionDropdown.png b/docs/docs/tutorial-extras/img/docsVersionDropdown.png deleted file mode 100644 index 97e4164618b5f8beda34cfa699720aba0ad2e342..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 25427 zcmXte1yoes_ckHYAgy#tNK1DKBBcTn3PU5^T}n!qfaD-4ozfv4LwDEEJq$50_3{4x z>pN@insx5o``P<>PR`sD{a#y*n1Gf50|SFt{jJJJ3=B;7$BQ2i`|(aulU?)U*ArVs zEkz8BxRInHAp)8nI>5=Qj|{SgKRHpY8Ry*F2n1^VBGL?Y2BGzx`!tfBuaC=?of zbp?T3T_F&N$J!O-3J!-uAdp9^hx>=e$CsB7C=`18SZ;0}9^jW37uVO<=jZ2lcXu$@ zJsO3CUO~?u%jxN3Xeb0~W^VNu>-zc%jYJ_3NaW)Og*rVsy}P|ZAyHRQ=>7dY5`lPt zBOb#d9uO!r^6>ERF~*}E?CuV73AuO-adQoSc(}f~eKdXqKq64r*Ec7}r}qyJ7w4C& zYnwMWH~06jqoX6}6$F7oAQAA>v$K`84HOb_2fMqxfLvZ)Jm!ypKhlC99vsjyFhih^ 
[GIT binary patch data for the deleted images docs/docs/tutorial-extras/img/docsVersionDropdown.png (25427 bytes) and docs/docs/tutorial-extras/img/localeDropdown.png (27841 bytes) omitted; it is not human-readable.]
zYg8dc?5IoMJQ2hB>hz-+?AEJm77QYbCtHtF_p0^ms1x@`UMtAF;}i{5AxiVl9DDpj zl)*5)Ng<4^TDD4i$KlbhQ-E&f_bUF+KzD6OX^sBayL(UNNV{|$loE2{yD|2UlLV?J z@Ig(y`w&7yeCv-`?uUV^&4RXrHsy&k@i}adNm;XgZ!a@xnvjG)yI_LjRiUqV%gYIh zTK1D&S;x6J%jL!y86wNhlMbcxK=q;CDA?OTEGBAUdVZ$JYB=ElyA%2HUEC_MuhHw9 zfP)~1CR0x8cHDC6+A8>NSYxQ2z$vA2UJn>pzZdq@C^#Xoh zdqe|=^fm{HmPOP#EjbbH25nT$CZP%K7azkF(mG$3cnFnvV!sc|V%0fVJ$l8KpsRTu zO8L$dH*_-Z+K;9`{p&$Rca2+turcwk=8~cyK0rNk55^Im*gM#q=U-^i{<0)$3uHRn zH_J=aK6A*?VLE!3Hi&0;r$KN%3v1#-jxKH%pl+cXKmYXX5gm8@@y1#xCav0t9od(z z48bdZip}mIsrXig{8+&@W$YEwRGTr);Lw|2E0DvqPPPlK%Q*y-eRpGMtZQa*dHiOB zm&!{b3*PxxlCIhz1he8Qe_ituN*=VlqosmzZgl~c62oxde$5Fm7!q248t=D%7jc(T&EAIMN0uPq5-R!nvG8HJu)x# z2l7Bbq!k*ScO@_{>}1p$JUt%!O}$q309mlnN$TVTn`5E)<0cDkchxB5N9ij>^1C4R z#OSfF27Mj!AhRy0lnNE`7ddO(RS@~@s9$AV72Rat8_}SIGlyS`bO`b4OLVX-@+it2;l!x9Kc))(Q=DJL~4JFw^ z(QdVI!ny}MfWXZX+W7j09)ZfAZ3qAKqN*1(7zzgC2SM1%t1q&GJt^ZKz5~NjeW$5Z JrC|B>e*nH7H{}2T diff --git a/docs/docs/tutorial-extras/manage-docs-versions.md b/docs/docs/tutorial-extras/manage-docs-versions.md deleted file mode 100644 index ccda0b9..0000000 --- a/docs/docs/tutorial-extras/manage-docs-versions.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Manage Docs Versions - -Docusaurus can manage multiple versions of your docs. - -## Create a docs version - -Release a version 1.0 of your project: - -```bash -npm run docusaurus docs:version 1.0 -``` - -The `docs` folder is copied into `versioned_docs/version-1.0` and `versions.json` is created. - -Your docs now have 2 versions: - -- `1.0` at `http://localhost:3000/docs/` for the version 1.0 docs -- `current` at `http://localhost:3000/docs/next/` for the **upcoming, unreleased docs** - -## Add a Version Dropdown - -To navigate seamlessly across versions, add a version dropdown. - -Modify the `docusaurus.config.js` file: - -```js title="docusaurus.config.js" -export default { - themeConfig: { - navbar: { - items: [ - // highlight-start - { - type: 'docsVersionDropdown', - }, - // highlight-end - ], - }, - }, -}; -``` - -The docs version dropdown appears in your navbar: - -![Docs Version Dropdown](./img/docsVersionDropdown.png) - -## Update an existing version - -It is possible to edit versioned docs in their respective folder: - -- `versioned_docs/version-1.0/hello.md` updates `http://localhost:3000/docs/hello` -- `docs/hello.md` updates `http://localhost:3000/docs/next/hello` diff --git a/docs/docs/tutorial-extras/translate-your-site.md b/docs/docs/tutorial-extras/translate-your-site.md deleted file mode 100644 index b5a644a..0000000 --- a/docs/docs/tutorial-extras/translate-your-site.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -sidebar_position: 2 ---- - -# Translate your site - -Let's translate `docs/intro.md` to French. - -## Configure i18n - -Modify `docusaurus.config.js` to add support for the `fr` locale: - -```js title="docusaurus.config.js" -export default { - i18n: { - defaultLocale: 'en', - locales: ['en', 'fr'], - }, -}; -``` - -## Translate a doc - -Copy the `docs/intro.md` file to the `i18n/fr` folder: - -```bash -mkdir -p i18n/fr/docusaurus-plugin-content-docs/current/ - -cp docs/intro.md i18n/fr/docusaurus-plugin-content-docs/current/intro.md -``` - -Translate `i18n/fr/docusaurus-plugin-content-docs/current/intro.md` in French. - -## Start your localized site - -Start your site on the French locale: - -```bash -npm run start -- --locale fr -``` - -Your localized site is accessible at [http://localhost:3000/fr/](http://localhost:3000/fr/) and the `Getting Started` page is translated. 
- -:::caution - -In development, you can only use one locale at a time. - -::: - -## Add a Locale Dropdown - -To navigate seamlessly across languages, add a locale dropdown. - -Modify the `docusaurus.config.js` file: - -```js title="docusaurus.config.js" -export default { - themeConfig: { - navbar: { - items: [ - // highlight-start - { - type: 'localeDropdown', - }, - // highlight-end - ], - }, - }, -}; -``` - -The locale dropdown now appears in your navbar: - -![Locale Dropdown](./img/localeDropdown.png) - -## Build your localized site - -Build your site for a specific locale: - -```bash -npm run build -- --locale fr -``` - -Or build your site to include all the locales at once: - -```bash -npm run build -``` From 3bd0e0e7164585aa22fec6964f99199dd6b20b83 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:45:57 -0500 Subject: [PATCH 10/26] added create a modern web app usage example placeholder --- docs/docs/usage/_category_.json | 8 ++++++++ docs/docs/usage/modern-web-app.md | 5 +++++ 2 files changed, 13 insertions(+) create mode 100644 docs/docs/usage/_category_.json create mode 100644 docs/docs/usage/modern-web-app.md diff --git a/docs/docs/usage/_category_.json b/docs/docs/usage/_category_.json new file mode 100644 index 0000000..37b7b21 --- /dev/null +++ b/docs/docs/usage/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "Usage", + "position": 3, + "link": { + "type": "generated-index", + "description": "Learn how to use RA.Aid effectively in different scenarios." + } +} diff --git a/docs/docs/usage/modern-web-app.md b/docs/docs/usage/modern-web-app.md new file mode 100644 index 0000000..71df7d1 --- /dev/null +++ b/docs/docs/usage/modern-web-app.md @@ -0,0 +1,5 @@ +# Create a Modern Web App + +This section will guide you through creating a modern web application using RA.Aid. Stay tuned for detailed instructions and best practices. + +Coming soon... From 1681234ab968aaedfbd8176c71339705c34dfd89 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 16:52:47 -0500 Subject: [PATCH 11/26] modern web app examples --- docs/docs/usage/modern-web-app.md | 44 +++++++++++++++++++++++++++++-- 1 file changed, 42 insertions(+), 2 deletions(-) diff --git a/docs/docs/usage/modern-web-app.md b/docs/docs/usage/modern-web-app.md index 71df7d1..083acea 100644 --- a/docs/docs/usage/modern-web-app.md +++ b/docs/docs/usage/modern-web-app.md @@ -1,5 +1,45 @@ # Create a Modern Web App -This section will guide you through creating a modern web application using RA.Aid. Stay tuned for detailed instructions and best practices. +When using AI tools like RA.Aid to create a modern web application, it's most effective to break down the work from a high level into discrete tasks. This guide will walk you through a practical example of building a Next.js application with common modern features. -Coming soon... +## Setting Up a Fresh Next.js Application + +To get started with a new Next.js project, you can use RA.Aid with a simple command like: + +``` +ra-aid -m "Initialize a new nextjs app." +``` + +RA.Aid will execute the necessary commands and set up a clean Next.js project for you with the latest best practices. It'll even search the web to read documentation about the latest version of Next. + +## Adding shadcn/ui Components + +Once your base Next.js application is ready, you can add the shadcn/ui component library. Tell RA.Aid: + +``` +ra-aid -m "Install shadcn into this project and put a few examples of shadcn components on the main page." 
+```
+
+This will configure your project with shadcn/ui's CLI, set up the necessary styling, and add your first components.
+
+## Integrating Prisma with SQLite
+
+For database integration, you can add Prisma ORM with SQLite. Simply instruct RA.Aid:
+
+```
+ra-aid -m "Integrate prisma/sqlite into this project."
+```
+
+RA.Aid will handle the Prisma setup, create your database schema, and set up your first model.
+
+## Adding Features Incrementally
+
+With the foundation in place, you can start adding features one by one. Here's an example:
+
+```
+ra-aid -m "Add a simple user registration form. Include fields for email and password."
+```
+
+Keep your feature requests focused and specific. This allows RA.Aid to implement them effectively while maintaining code quality.
+
+Remember to build your application incrementally, testing each feature as it's added. This approach helps manage complexity and ensures a stable development process.
\ No newline at end of file

From 81198f91e8f530f02ced667927f8604d6d55c02c Mon Sep 17 00:00:00 2001
From: AI Christianson
Date: Fri, 14 Feb 2025 17:04:43 -0500
Subject: [PATCH 12/26] usage guides

---
 docs/docs/getting-help.md   | 11 +++++++++++
 docs/docs/usage/cpp-game.md | 19 +++++++++++++++++++
 docs/docs/usage/monorepo.md | 17 +++++++++++++++++
 3 files changed, 47 insertions(+)
 create mode 100644 docs/docs/getting-help.md
 create mode 100644 docs/docs/usage/cpp-game.md
 create mode 100644 docs/docs/usage/monorepo.md

diff --git a/docs/docs/getting-help.md b/docs/docs/getting-help.md
new file mode 100644
index 0000000..fe13d0f
--- /dev/null
+++ b/docs/docs/getting-help.md
@@ -0,0 +1,11 @@
+# Getting Help
+
+## Bug Reports and Feature Requests
+
+If you encounter any issues or have ideas for improvements, please file them on our [GitHub Issues page](https://github.com/ai-christianson/RA.Aid/issues).
+
+## Community Support
+
+Join our Discord community to chat with other users and get help:
+
+- [Join the Discord Server](https://discord.gg/f6wYbzHYxV)
diff --git a/docs/docs/usage/cpp-game.md b/docs/docs/usage/cpp-game.md
new file mode 100644
index 0000000..7a68c8a
--- /dev/null
+++ b/docs/docs/usage/cpp-game.md
@@ -0,0 +1,19 @@
+# Create a Game using RA.Aid
+
+RA.Aid can work on almost any kind of software, including writing games in languages like C++.
+
+## Creating the Initial Game
+
+To get started with creating a game, you can use RA.Aid with a simple command like:
+
+```
+ra-aid -m "Create a basic breakout clone in C++ using OpenGL."
+```
+
+## Adding Features
+
+Once your base game is working, you can add features like this:
+
+```
+ra-aid -m "Add score tracking/display to the main screen."
+```
\ No newline at end of file
diff --git a/docs/docs/usage/monorepo.md b/docs/docs/usage/monorepo.md
new file mode 100644
index 0000000..6c2f3fd
--- /dev/null
+++ b/docs/docs/usage/monorepo.md
@@ -0,0 +1,17 @@
+# Work with an Existing Monorepo using RA.Aid
+
+RA.Aid is good at handling complex monorepo projects. For example, if you have an app set up like this:
+
+```
+app/
+web/
+backend/
+```
+
+You can run RA.Aid at the top level and give it commands like this:
+
+```
+ra-aid -m "Update the user form to support birthdates. Be sure to also update the DB model and migration scripts."
+```
+
+RA.Aid will proceed to work on multiple high-level components of your application.
\ No newline at end of file From cc16aa81da5866a23937676d9f849ad9377ee2bf Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 17:16:37 -0500 Subject: [PATCH 13/26] docs --- docs/docs/contributing.md | 79 +++++++++++++++++++++++++++++++++++++++ docs/docs/intro.md | 11 +++--- 2 files changed, 84 insertions(+), 6 deletions(-) create mode 100644 docs/docs/contributing.md diff --git a/docs/docs/contributing.md b/docs/docs/contributing.md new file mode 100644 index 0000000..3d0e7a6 --- /dev/null +++ b/docs/docs/contributing.md @@ -0,0 +1,79 @@ +# Contributing to RA.Aid + +Welcome to the RA.Aid community! We're thrilled you're interested in contributing. This project thrives thanks to contributors like you, and we're excited to have you join us on this journey. + +## Ways to Contribute + +There are many valuable ways to contribute to RA.Aid: + +### 1. Join Our Community +- Join our Discord community to connect with other users and contributors +- Help answer questions from other users +- Share your experiences and use cases +- Provide feedback and suggestions + +### 2. Report Issues +- Found a bug? Open an issue on our [GitHub repository](https://github.com/ai-christianson/RA.Aid/issues) +- Before creating a new issue, please check if it already exists +- Include as much detail as possible: + - Steps to reproduce + - Expected vs actual behavior + - Your environment (OS, Python version, etc.) + - Any relevant error messages + +### 3. Contribute to Documentation +- Our documentation lives in the `docs/` folder +- Found a typo? Have an idea for better explanations? Open a PR! +- You can use RA.Aid itself to help draft documentation changes +- Even small improvements are welcome + +### 4. Code Contributions +- Look for issues labeled "help wanted" or "good first issue" on our GitHub +- Feel free to pick up any open issue - don't be shy! +- **You can even use RA.Aid to help understand the codebase and make changes** +- Before starting work on larger changes, please open an issue to discuss + +## Making Your First Contribution + +1. Fork the repository +2. Create a branch for your changes +3. Make your changes +4. Write/update tests if needed +5. Submit a Pull Request +6. Wait for review and address any feedback + +Don't hesitate to ask questions if you're unsure about anything. Remember: every expert was once a beginner! + +## Development Setup + +1. Clone the repository: + +```bash +git clone https://github.com/ai-christianson/RA.Aid.git +cd RA.Aid +``` + +2. Create and activate a virtual environment: + +```bash +python -m venv venv +source venv/bin/activate +``` + +3. Install in dev mode: + +```bash +pip install -e . +``` + +4. Run RA.Aid: + +```bash +ra-aid -m "Your task or query here" +``` + +## This is Your Project Too + +RA.Aid is a community project that grows stronger with each contribution. Whether it's fixing a typo in documentation, reporting a bug, or adding a new feature - every contribution matters and is valued. + +Don't feel like you need to make massive changes to contribute. Small, focused contributions are often the best way to start. Use what you know, and learn as you go! diff --git a/docs/docs/intro.md b/docs/docs/intro.md index 057f01f..c529780 100644 --- a/docs/docs/intro.md +++ b/docs/docs/intro.md @@ -18,9 +18,8 @@ RA.Aid (pronounced "raid") is your AI-powered development companion that helps y Ready to get started? 
Jump right to: -- [Installation Guide](/installation) -- [Configuration Guide](/configuration) -- [Basic Usage Examples](/usage) +- [Installation Guide](/quickstart/installation) +- [Basic Usage Examples](/usage/modern-web-app) ### Basic Example @@ -49,9 +48,9 @@ ra-aid -m "Add input validation to the login form" ## Next Steps -- Check out the [Installation Guide](/installation) to set up RA.Aid -- Learn about [Configuration](/configuration) options -- See [Usage Examples](/usage) to get started quickly +- Check out the [Installation Guide](/quickstart/installation) to set up RA.Aid +- See [Usage Examples](/usage/modern-web-app) to get started quickly +- Read our [Contributing Guide](/contributing) to get involved - Join our [Discord Community](https://discord.gg/f6wYbzHYxV) for help and discussions Ready to revolutionize your development workflow? Let's get started! 🚀 From feecfea3b8c144b22bac673cee611ea1a24c554c Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 17:22:26 -0500 Subject: [PATCH 14/26] initial open models docs. --- docs/docs/quickstart/open-models.md | 84 +++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 docs/docs/quickstart/open-models.md diff --git a/docs/docs/quickstart/open-models.md b/docs/docs/quickstart/open-models.md new file mode 100644 index 0000000..dd18cab --- /dev/null +++ b/docs/docs/quickstart/open-models.md @@ -0,0 +1,84 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# Open Models Configuration + +RA.Aid supports various open source model providers and configurations. This guide shows you how to configure and use different open models with RA.Aid. + +## Supported Providers + + + + +### DeepSeek Models + +To use DeepSeek models, you'll need a DeepSeek API key. Set it in your environment: + +```bash +export DEEPSEEK_API_KEY=your_api_key_here +``` + +Then run RA.Aid with the deepseek provider and model: + +```bash +ra-aid -m "Your task" --provider deepseek --model deepseek-reasoner +``` + +You can also access DeepSeek models through OpenRouter: + +```bash +ra-aid -m "Your task" --provider openrouter --model deepseek/deepseek-r1 +``` + + + + +### OpenRouter Integration + +OpenRouter provides access to various open source models. 
First, set your API key: + +```bash +export OPENROUTER_API_KEY=your_api_key_here +``` + +Example using Mistral: + +```bash +ra-aid -m "Your task" --provider openrouter --model mistralai/mistral-large-2411 +``` + + + + +### Expert Tool Configuration + +The expert tool can be configured to use open models for complex logic and debugging tasks: + +```bash +# Use DeepSeek for expert tool +export EXPERT_DEEPSEEK_API_KEY=your_deepseek_api_key +ra-aid -m "Your task" --expert-provider deepseek --expert-model deepseek-reasoner + +# Use OpenRouter for expert +export EXPERT_OPENROUTER_API_KEY=your_openrouter_api_key +ra-aid -m "Your task" --expert-provider openrouter --expert-model mistralai/mistral-large-2411 +``` + + + + +## Environment Variables + +Here are all the environment variables supported for open model configuration: + +- `OPENROUTER_API_KEY`: Required for OpenRouter provider +- `DEEPSEEK_API_KEY`: Required for DeepSeek provider +- `EXPERT_OPENROUTER_API_KEY`: API key for expert tool using OpenRouter provider +- `EXPERT_DEEPSEEK_API_KEY`: API key for expert tool using DeepSeek provider + +## Notes and Best Practices + +- Set environment variables in your shell's configuration file (e.g., `~/.bashrc` or `~/.zshrc`) for persistence +- Consider using different models for different types of tasks (e.g., DeepSeek for reasoning, Mistral for general tasks) +- Review model performance and adjust based on your specific needs +- Keep your API keys secure and never commit them to version control From 193d4c6955bfaeba1ef11fbc7049fa09f6aed40a Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 17:25:18 -0500 Subject: [PATCH 15/26] docs cleanup --- docs/docs/quickstart/open-models.md | 102 ++++++++++------------------ docs/docs/quickstart/recommended.md | 102 ++++++++++++++++++---------- 2 files changed, 102 insertions(+), 102 deletions(-) diff --git a/docs/docs/quickstart/open-models.md b/docs/docs/quickstart/open-models.md index dd18cab..4df30a7 100644 --- a/docs/docs/quickstart/open-models.md +++ b/docs/docs/quickstart/open-models.md @@ -1,84 +1,52 @@ -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; +# Recommended Config -# Open Models Configuration +This configuration combines the strengths of multiple AI models to provide the best experience: -RA.Aid supports various open source model providers and configurations. This guide shows you how to configure and use different open models with RA.Aid. +- Anthropic Sonnet excels at driving the agent's core reasoning and planning +- OpenAI's models provide robust debugging and logical analysis capabilities +- Tavily web search integration allows the agent to find relevant information online -## Supported Providers +:::info +RA.Aid must be installed before using these configurations. If you haven't installed it yet, please see the [Installation Guide](installation). +::: - - +## Getting API Keys -### DeepSeek Models +To use RA.Aid with the recommended configuration, you'll need to obtain API keys from the following services: -To use DeepSeek models, you'll need a DeepSeek API key. Set it in your environment: +1. **OpenAI API Key**: Create an account at [OpenAI's platform](https://platform.openai.com) and generate an API key from your dashboard. + +2. **Anthropic API Key**: Sign up at [Anthropic's Console](https://console.anthropic.com), then generate an API key from the API Keys section. + +3. 
**Tavily API Key** (optional): Create an account at [Tavily](https://app.tavily.com/sign-in) and get your API key from the dashboard. + +Please keep your API keys secure and never share them publicly. Each service has its own pricing and usage terms. + +## Configuration + +Configure your API keys: ```bash -export DEEPSEEK_API_KEY=your_api_key_here +# For OpenAI (required) +export OPENAI_API_KEY=your_api_key_here + +# For Anthropic (required) +export ANTHROPIC_API_KEY=your_api_key_here + +# For web search capability (optional) +export TAVILY_API_KEY=your_api_key_here ``` -Then run RA.Aid with the deepseek provider and model: +## Basic Usage + +Start RA.Aid in interactive chat mode: ```bash -ra-aid -m "Your task" --provider deepseek --model deepseek-reasoner +ra-aid --chat ``` -You can also access DeepSeek models through OpenRouter: +Or run with a single command: ```bash -ra-aid -m "Your task" --provider openrouter --model deepseek/deepseek-r1 +ra-aid -m "Help me understand this code" ``` - - - - -### OpenRouter Integration - -OpenRouter provides access to various open source models. First, set your API key: - -```bash -export OPENROUTER_API_KEY=your_api_key_here -``` - -Example using Mistral: - -```bash -ra-aid -m "Your task" --provider openrouter --model mistralai/mistral-large-2411 -``` - - - - -### Expert Tool Configuration - -The expert tool can be configured to use open models for complex logic and debugging tasks: - -```bash -# Use DeepSeek for expert tool -export EXPERT_DEEPSEEK_API_KEY=your_deepseek_api_key -ra-aid -m "Your task" --expert-provider deepseek --expert-model deepseek-reasoner - -# Use OpenRouter for expert -export EXPERT_OPENROUTER_API_KEY=your_openrouter_api_key -ra-aid -m "Your task" --expert-provider openrouter --expert-model mistralai/mistral-large-2411 -``` - - - - -## Environment Variables - -Here are all the environment variables supported for open model configuration: - -- `OPENROUTER_API_KEY`: Required for OpenRouter provider -- `DEEPSEEK_API_KEY`: Required for DeepSeek provider -- `EXPERT_OPENROUTER_API_KEY`: API key for expert tool using OpenRouter provider -- `EXPERT_DEEPSEEK_API_KEY`: API key for expert tool using DeepSeek provider - -## Notes and Best Practices - -- Set environment variables in your shell's configuration file (e.g., `~/.bashrc` or `~/.zshrc`) for persistence -- Consider using different models for different types of tasks (e.g., DeepSeek for reasoning, Mistral for general tasks) -- Review model performance and adjust based on your specific needs -- Keep your API keys secure and never commit them to version control diff --git a/docs/docs/quickstart/recommended.md b/docs/docs/quickstart/recommended.md index 4df30a7..dd18cab 100644 --- a/docs/docs/quickstart/recommended.md +++ b/docs/docs/quickstart/recommended.md @@ -1,52 +1,84 @@ -# Recommended Config +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; -This configuration combines the strengths of multiple AI models to provide the best experience: +# Open Models Configuration -- Anthropic Sonnet excels at driving the agent's core reasoning and planning -- OpenAI's models provide robust debugging and logical analysis capabilities -- Tavily web search integration allows the agent to find relevant information online +RA.Aid supports various open source model providers and configurations. This guide shows you how to configure and use different open models with RA.Aid. -:::info -RA.Aid must be installed before using these configurations. 
If you haven't installed it yet, please see the [Installation Guide](installation). -::: +## Supported Providers -## Getting API Keys + + -To use RA.Aid with the recommended configuration, you'll need to obtain API keys from the following services: +### DeepSeek Models -1. **OpenAI API Key**: Create an account at [OpenAI's platform](https://platform.openai.com) and generate an API key from your dashboard. - -2. **Anthropic API Key**: Sign up at [Anthropic's Console](https://console.anthropic.com), then generate an API key from the API Keys section. - -3. **Tavily API Key** (optional): Create an account at [Tavily](https://app.tavily.com/sign-in) and get your API key from the dashboard. - -Please keep your API keys secure and never share them publicly. Each service has its own pricing and usage terms. - -## Configuration - -Configure your API keys: +To use DeepSeek models, you'll need a DeepSeek API key. Set it in your environment: ```bash -# For OpenAI (required) -export OPENAI_API_KEY=your_api_key_here - -# For Anthropic (required) -export ANTHROPIC_API_KEY=your_api_key_here - -# For web search capability (optional) -export TAVILY_API_KEY=your_api_key_here +export DEEPSEEK_API_KEY=your_api_key_here ``` -## Basic Usage - -Start RA.Aid in interactive chat mode: +Then run RA.Aid with the deepseek provider and model: ```bash -ra-aid --chat +ra-aid -m "Your task" --provider deepseek --model deepseek-reasoner ``` -Or run with a single command: +You can also access DeepSeek models through OpenRouter: ```bash -ra-aid -m "Help me understand this code" +ra-aid -m "Your task" --provider openrouter --model deepseek/deepseek-r1 ``` + + + + +### OpenRouter Integration + +OpenRouter provides access to various open source models. First, set your API key: + +```bash +export OPENROUTER_API_KEY=your_api_key_here +``` + +Example using Mistral: + +```bash +ra-aid -m "Your task" --provider openrouter --model mistralai/mistral-large-2411 +``` + + + + +### Expert Tool Configuration + +The expert tool can be configured to use open models for complex logic and debugging tasks: + +```bash +# Use DeepSeek for expert tool +export EXPERT_DEEPSEEK_API_KEY=your_deepseek_api_key +ra-aid -m "Your task" --expert-provider deepseek --expert-model deepseek-reasoner + +# Use OpenRouter for expert +export EXPERT_OPENROUTER_API_KEY=your_openrouter_api_key +ra-aid -m "Your task" --expert-provider openrouter --expert-model mistralai/mistral-large-2411 +``` + + + + +## Environment Variables + +Here are all the environment variables supported for open model configuration: + +- `OPENROUTER_API_KEY`: Required for OpenRouter provider +- `DEEPSEEK_API_KEY`: Required for DeepSeek provider +- `EXPERT_OPENROUTER_API_KEY`: API key for expert tool using OpenRouter provider +- `EXPERT_DEEPSEEK_API_KEY`: API key for expert tool using DeepSeek provider + +## Notes and Best Practices + +- Set environment variables in your shell's configuration file (e.g., `~/.bashrc` or `~/.zshrc`) for persistence +- Consider using different models for different types of tasks (e.g., DeepSeek for reasoning, Mistral for general tasks) +- Review model performance and adjust based on your specific needs +- Keep your API keys secure and never commit them to version control From 3f6c1562611c1e855218c5eb4e0f55ecc7060c5d Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 17:30:11 -0500 Subject: [PATCH 16/26] fix docs --- docs/docs/quickstart/open-models.md | 102 ++++++++++++++++++---------- docs/docs/quickstart/recommended.md | 102 
++++++++++------------------ 2 files changed, 102 insertions(+), 102 deletions(-) diff --git a/docs/docs/quickstart/open-models.md b/docs/docs/quickstart/open-models.md index 4df30a7..dd18cab 100644 --- a/docs/docs/quickstart/open-models.md +++ b/docs/docs/quickstart/open-models.md @@ -1,52 +1,84 @@ -# Recommended Config +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; -This configuration combines the strengths of multiple AI models to provide the best experience: +# Open Models Configuration -- Anthropic Sonnet excels at driving the agent's core reasoning and planning -- OpenAI's models provide robust debugging and logical analysis capabilities -- Tavily web search integration allows the agent to find relevant information online +RA.Aid supports various open source model providers and configurations. This guide shows you how to configure and use different open models with RA.Aid. -:::info -RA.Aid must be installed before using these configurations. If you haven't installed it yet, please see the [Installation Guide](installation). -::: +## Supported Providers -## Getting API Keys + + -To use RA.Aid with the recommended configuration, you'll need to obtain API keys from the following services: +### DeepSeek Models -1. **OpenAI API Key**: Create an account at [OpenAI's platform](https://platform.openai.com) and generate an API key from your dashboard. - -2. **Anthropic API Key**: Sign up at [Anthropic's Console](https://console.anthropic.com), then generate an API key from the API Keys section. - -3. **Tavily API Key** (optional): Create an account at [Tavily](https://app.tavily.com/sign-in) and get your API key from the dashboard. - -Please keep your API keys secure and never share them publicly. Each service has its own pricing and usage terms. - -## Configuration - -Configure your API keys: +To use DeepSeek models, you'll need a DeepSeek API key. Set it in your environment: ```bash -# For OpenAI (required) -export OPENAI_API_KEY=your_api_key_here - -# For Anthropic (required) -export ANTHROPIC_API_KEY=your_api_key_here - -# For web search capability (optional) -export TAVILY_API_KEY=your_api_key_here +export DEEPSEEK_API_KEY=your_api_key_here ``` -## Basic Usage - -Start RA.Aid in interactive chat mode: +Then run RA.Aid with the deepseek provider and model: ```bash -ra-aid --chat +ra-aid -m "Your task" --provider deepseek --model deepseek-reasoner ``` -Or run with a single command: +You can also access DeepSeek models through OpenRouter: ```bash -ra-aid -m "Help me understand this code" +ra-aid -m "Your task" --provider openrouter --model deepseek/deepseek-r1 ``` + + + + +### OpenRouter Integration + +OpenRouter provides access to various open source models. 
First, set your API key: + +```bash +export OPENROUTER_API_KEY=your_api_key_here +``` + +Example using Mistral: + +```bash +ra-aid -m "Your task" --provider openrouter --model mistralai/mistral-large-2411 +``` + + + + +### Expert Tool Configuration + +The expert tool can be configured to use open models for complex logic and debugging tasks: + +```bash +# Use DeepSeek for expert tool +export EXPERT_DEEPSEEK_API_KEY=your_deepseek_api_key +ra-aid -m "Your task" --expert-provider deepseek --expert-model deepseek-reasoner + +# Use OpenRouter for expert +export EXPERT_OPENROUTER_API_KEY=your_openrouter_api_key +ra-aid -m "Your task" --expert-provider openrouter --expert-model mistralai/mistral-large-2411 +``` + + + + +## Environment Variables + +Here are all the environment variables supported for open model configuration: + +- `OPENROUTER_API_KEY`: Required for OpenRouter provider +- `DEEPSEEK_API_KEY`: Required for DeepSeek provider +- `EXPERT_OPENROUTER_API_KEY`: API key for expert tool using OpenRouter provider +- `EXPERT_DEEPSEEK_API_KEY`: API key for expert tool using DeepSeek provider + +## Notes and Best Practices + +- Set environment variables in your shell's configuration file (e.g., `~/.bashrc` or `~/.zshrc`) for persistence +- Consider using different models for different types of tasks (e.g., DeepSeek for reasoning, Mistral for general tasks) +- Review model performance and adjust based on your specific needs +- Keep your API keys secure and never commit them to version control diff --git a/docs/docs/quickstart/recommended.md b/docs/docs/quickstart/recommended.md index dd18cab..4df30a7 100644 --- a/docs/docs/quickstart/recommended.md +++ b/docs/docs/quickstart/recommended.md @@ -1,84 +1,52 @@ -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; +# Recommended Config -# Open Models Configuration +This configuration combines the strengths of multiple AI models to provide the best experience: -RA.Aid supports various open source model providers and configurations. This guide shows you how to configure and use different open models with RA.Aid. +- Anthropic Sonnet excels at driving the agent's core reasoning and planning +- OpenAI's models provide robust debugging and logical analysis capabilities +- Tavily web search integration allows the agent to find relevant information online -## Supported Providers +:::info +RA.Aid must be installed before using these configurations. If you haven't installed it yet, please see the [Installation Guide](installation). +::: - - +## Getting API Keys -### DeepSeek Models +To use RA.Aid with the recommended configuration, you'll need to obtain API keys from the following services: -To use DeepSeek models, you'll need a DeepSeek API key. Set it in your environment: +1. **OpenAI API Key**: Create an account at [OpenAI's platform](https://platform.openai.com) and generate an API key from your dashboard. + +2. **Anthropic API Key**: Sign up at [Anthropic's Console](https://console.anthropic.com), then generate an API key from the API Keys section. + +3. **Tavily API Key** (optional): Create an account at [Tavily](https://app.tavily.com/sign-in) and get your API key from the dashboard. + +Please keep your API keys secure and never share them publicly. Each service has its own pricing and usage terms. 
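+
+The exports shown in the next section only last for the current shell session. If you want the keys to persist, one option (a sketch assuming a bash-style shell; use `~/.zshrc` or the equivalent for your shell) is to add them to your shell profile:
+
+```bash
+# Append the exports to your shell profile so new terminals pick them up.
+# Replace the placeholder values with your real keys.
+echo 'export ANTHROPIC_API_KEY=your_api_key_here' >> ~/.bashrc
+echo 'export OPENAI_API_KEY=your_api_key_here' >> ~/.bashrc
+source ~/.bashrc
+```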
+ +## Configuration + +Configure your API keys: ```bash -export DEEPSEEK_API_KEY=your_api_key_here +# For OpenAI (required) +export OPENAI_API_KEY=your_api_key_here + +# For Anthropic (required) +export ANTHROPIC_API_KEY=your_api_key_here + +# For web search capability (optional) +export TAVILY_API_KEY=your_api_key_here ``` -Then run RA.Aid with the deepseek provider and model: +## Basic Usage + +Start RA.Aid in interactive chat mode: ```bash -ra-aid -m "Your task" --provider deepseek --model deepseek-reasoner +ra-aid --chat ``` -You can also access DeepSeek models through OpenRouter: +Or run with a single command: ```bash -ra-aid -m "Your task" --provider openrouter --model deepseek/deepseek-r1 +ra-aid -m "Help me understand this code" ``` - - - - -### OpenRouter Integration - -OpenRouter provides access to various open source models. First, set your API key: - -```bash -export OPENROUTER_API_KEY=your_api_key_here -``` - -Example using Mistral: - -```bash -ra-aid -m "Your task" --provider openrouter --model mistralai/mistral-large-2411 -``` - - - - -### Expert Tool Configuration - -The expert tool can be configured to use open models for complex logic and debugging tasks: - -```bash -# Use DeepSeek for expert tool -export EXPERT_DEEPSEEK_API_KEY=your_deepseek_api_key -ra-aid -m "Your task" --expert-provider deepseek --expert-model deepseek-reasoner - -# Use OpenRouter for expert -export EXPERT_OPENROUTER_API_KEY=your_openrouter_api_key -ra-aid -m "Your task" --expert-provider openrouter --expert-model mistralai/mistral-large-2411 -``` - - - - -## Environment Variables - -Here are all the environment variables supported for open model configuration: - -- `OPENROUTER_API_KEY`: Required for OpenRouter provider -- `DEEPSEEK_API_KEY`: Required for DeepSeek provider -- `EXPERT_OPENROUTER_API_KEY`: API key for expert tool using OpenRouter provider -- `EXPERT_DEEPSEEK_API_KEY`: API key for expert tool using DeepSeek provider - -## Notes and Best Practices - -- Set environment variables in your shell's configuration file (e.g., `~/.bashrc` or `~/.zshrc`) for persistence -- Consider using different models for different types of tasks (e.g., DeepSeek for reasoning, Mistral for general tasks) -- Review model performance and adjust based on your specific needs -- Keep your API keys secure and never commit them to version control From 4866ca99959b942abae7233e8337d0f37771d6f9 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 17:31:59 -0500 Subject: [PATCH 17/26] fix docs order --- docs/docs/quickstart/installation.md | 4 ++++ docs/docs/quickstart/open-models.md | 4 ++++ docs/docs/quickstart/recommended.md | 4 ++++ 3 files changed, 12 insertions(+) diff --git a/docs/docs/quickstart/installation.md b/docs/docs/quickstart/installation.md index 027b481..d2bccea 100644 --- a/docs/docs/quickstart/installation.md +++ b/docs/docs/quickstart/installation.md @@ -1,3 +1,7 @@ +--- +sidebar_position: 1 +--- + import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; diff --git a/docs/docs/quickstart/open-models.md b/docs/docs/quickstart/open-models.md index dd18cab..51dfc16 100644 --- a/docs/docs/quickstart/open-models.md +++ b/docs/docs/quickstart/open-models.md @@ -1,3 +1,7 @@ +--- +sidebar_position: 3 +--- + import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; diff --git a/docs/docs/quickstart/recommended.md b/docs/docs/quickstart/recommended.md index 4df30a7..38d519b 100644 --- a/docs/docs/quickstart/recommended.md +++ b/docs/docs/quickstart/recommended.md @@ -1,3 
+1,7 @@ +--- +sidebar_position: 2 +--- + # Recommended Config This configuration combines the strengths of multiple AI models to provide the best experience: From e9fe7e363a73e938b6ba5d47a79fdf5f2758bdbd Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 17:33:58 -0500 Subject: [PATCH 18/26] fix gh link in docs --- docs/docusaurus.config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts index a2f1392..7471d1d 100644 --- a/docs/docusaurus.config.ts +++ b/docs/docusaurus.config.ts @@ -48,7 +48,7 @@ const config: Config = { label: 'Docs', }, { - href: 'https://github.com/smallcloudai/refact-aide', + href: 'https://github.com/ai-christianson/RA.Aid', label: 'GitHub', position: 'right', }, From fc90bc740d1ff90701f800f63bfaaaf83d870901 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 17:42:50 -0500 Subject: [PATCH 19/26] open models docs --- docs/docs/quickstart/open-models.md | 170 +++++++++++++++++++++------- 1 file changed, 131 insertions(+), 39 deletions(-) diff --git a/docs/docs/quickstart/open-models.md b/docs/docs/quickstart/open-models.md index 51dfc16..e0d7906 100644 --- a/docs/docs/quickstart/open-models.md +++ b/docs/docs/quickstart/open-models.md @@ -7,82 +7,174 @@ import TabItem from '@theme/TabItem'; # Open Models Configuration -RA.Aid supports various open source model providers and configurations. This guide shows you how to configure and use different open models with RA.Aid. +RA.Aid supports a variety of open source and compatible model providers. This guide covers configuration options and best practices for using different models with RA.Aid. -## Supported Providers +## Overview + + + + +RA.Aid supports these model providers: + +| Provider | Description | Key Features | +|----------|-------------|--------------| +| DeepSeek | Specialized reasoning models | High performance on complex tasks | +| OpenRouter | Gateway to multiple open models | Wide model selection | +| OpenAI-compatible | API-compatible endpoints | Use with compatible hosting | +| Anthropic | Claude model family | Strong reasoning capabilities | +| Gemini | Google AI models | Competitive performance | + + + + +### Basic Configuration + +1. Set your provider's API key: +```bash +# Choose the appropriate provider +export DEEPSEEK_API_KEY=your_key +export OPENROUTER_API_KEY=your_key +export OPENAI_API_KEY=your_key +export ANTHROPIC_API_KEY=your_key +export GEMINI_API_KEY=your_key +``` + +2. Run RA.Aid with your chosen provider: +```bash +ra-aid -m "Your task" --provider --model +``` + + + + +## Provider Configuration ### DeepSeek Models -To use DeepSeek models, you'll need a DeepSeek API key. Set it in your environment: +DeepSeek offers powerful reasoning models optimized for complex tasks. ```bash +# Environment setup export DEEPSEEK_API_KEY=your_api_key_here -``` -Then run RA.Aid with the deepseek provider and model: - -```bash +# Basic usage ra-aid -m "Your task" --provider deepseek --model deepseek-reasoner + +# With temperature control +ra-aid -m "Your task" --provider deepseek --model deepseek-reasoner --temperature 0.7 ``` -You can also access DeepSeek models through OpenRouter: - -```bash -ra-aid -m "Your task" --provider openrouter --model deepseek/deepseek-r1 -``` - - - +**Available Models:** +- `deepseek-reasoner`: Optimized for reasoning tasks +- Access via OpenRouter: `deepseek/deepseek-r1` + + ### OpenRouter Integration -OpenRouter provides access to various open source models. 
First, set your API key: +OpenRouter provides access to multiple open source models through a single API. ```bash +# Environment setup export OPENROUTER_API_KEY=your_api_key_here -``` -Example using Mistral: - -```bash +# Example commands ra-aid -m "Your task" --provider openrouter --model mistralai/mistral-large-2411 +ra-aid -m "Your task" --provider openrouter --model deepseek/deepseek-r1 ``` - - +**Popular Models:** +- `mistralai/mistral-large-2411` +- `anthropic/claude-3` +- `deepseek/deepseek-r1` + + -### Expert Tool Configuration +### OpenAI-compatible Endpoints -The expert tool can be configured to use open models for complex logic and debugging tasks: +Use OpenAI-compatible API endpoints with custom hosting solutions. ```bash -# Use DeepSeek for expert tool -export EXPERT_DEEPSEEK_API_KEY=your_deepseek_api_key +# Environment setup +export OPENAI_API_KEY=your_api_key_here +export OPENAI_API_BASE=https://your-api-endpoint + +# Usage +ra-aid -m "Your task" --provider openai-compatible --model your-model-name +``` + +**Configuration Options:** +- Set custom base URL with `OPENAI_API_BASE` +- Supports temperature control +- Compatible with most OpenAI-style APIs + + + +## Advanced Configuration + + + + +### Expert Tool Configuration + +Configure the expert tool for specialized tasks: + +```bash +# DeepSeek expert +export EXPERT_DEEPSEEK_API_KEY=your_key ra-aid -m "Your task" --expert-provider deepseek --expert-model deepseek-reasoner -# Use OpenRouter for expert -export EXPERT_OPENROUTER_API_KEY=your_openrouter_api_key +# OpenRouter expert +export EXPERT_OPENROUTER_API_KEY=your_key ra-aid -m "Your task" --expert-provider openrouter --expert-model mistralai/mistral-large-2411 ``` - + + + +### Temperature Settings + +Control model creativity vs determinism: + +```bash +# More deterministic (good for coding) +ra-aid -m "Your task" --temperature 0.2 + +# More creative (good for brainstorming) +ra-aid -m "Your task" --temperature 0.8 +``` + +**Note:** Not all models support temperature control. Check provider documentation. 
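+
+Temperature can also be combined with the provider and expert flags from the sections above in a single invocation. A rough sketch (the model names are just examples reused from this guide; substitute whichever pair you actually run):
+
+```bash
+# Hypothetical combined run: OpenRouter drives the main agent,
+# DeepSeek handles the expert tool, and a low temperature keeps edits deterministic.
+export OPENROUTER_API_KEY=your_key
+export EXPERT_DEEPSEEK_API_KEY=your_key
+ra-aid -m "Refactor the config loader" \
+  --provider openrouter --model mistralai/mistral-large-2411 \
+  --expert-provider deepseek --expert-model deepseek-reasoner \
+  --temperature 0.2
+```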
+ +## Best Practices + +- Set environment variables in your shell configuration file +- Use lower temperatures (0.1-0.3) for coding tasks +- Test different models to find the best fit for your use case +- Consider using expert mode for complex programming tasks + ## Environment Variables -Here are all the environment variables supported for open model configuration: +Complete list of supported environment variables: -- `OPENROUTER_API_KEY`: Required for OpenRouter provider -- `DEEPSEEK_API_KEY`: Required for DeepSeek provider -- `EXPERT_OPENROUTER_API_KEY`: API key for expert tool using OpenRouter provider -- `EXPERT_DEEPSEEK_API_KEY`: API key for expert tool using DeepSeek provider +| Variable | Provider | Purpose | +|----------|----------|----------| +| `OPENROUTER_API_KEY` | OpenRouter | Main API access | +| `DEEPSEEK_API_KEY` | DeepSeek | Main API access | +| `OPENAI_API_KEY` | OpenAI-compatible | API access | +| `OPENAI_API_BASE` | OpenAI-compatible | Custom endpoint | +| `ANTHROPIC_API_KEY` | Anthropic | API access | +| `GEMINI_API_KEY` | Gemini | API access | +| `EXPERT_OPENROUTER_API_KEY` | OpenRouter | Expert tool | +| `EXPERT_DEEPSEEK_API_KEY` | DeepSeek | Expert tool | -## Notes and Best Practices +## Troubleshooting -- Set environment variables in your shell's configuration file (e.g., `~/.bashrc` or `~/.zshrc`) for persistence -- Consider using different models for different types of tasks (e.g., DeepSeek for reasoning, Mistral for general tasks) -- Review model performance and adjust based on your specific needs -- Keep your API keys secure and never commit them to version control +- Verify API keys are set correctly +- Check endpoint URLs for OpenAI-compatible setups +- Monitor API rate limits and quotas From 237e227b2ad0d4f8a51b9eca4bc4143615c72599 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 17:51:21 -0500 Subject: [PATCH 20/26] open models docs --- docs/docs/quickstart/open-models.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/docs/quickstart/open-models.md b/docs/docs/quickstart/open-models.md index e0d7906..42ce211 100644 --- a/docs/docs/quickstart/open-models.md +++ b/docs/docs/quickstart/open-models.md @@ -18,11 +18,11 @@ RA.Aid supports these model providers: | Provider | Description | Key Features | |----------|-------------|--------------| -| DeepSeek | Specialized reasoning models | High performance on complex tasks | -| OpenRouter | Gateway to multiple open models | Wide model selection | -| OpenAI-compatible | API-compatible endpoints | Use with compatible hosting | -| Anthropic | Claude model family | Strong reasoning capabilities | -| Gemini | Google AI models | Competitive performance | +| DeepSeek | Chinese hedge fund who creates sophisticated LLMs | Strong, open models like R1 | +| OpenRouter | Multi-model gateway service | Access to 100+ models, unified API interface, pay-per-token | +| OpenAI-compatible | Self-hosted model endpoints | Compatible with Llama, Mistral and other open models | +| Anthropic | Claude model series | 200k token context, strong tool use, JSON/XML parsing | +| Gemini | Google's multimodal models | Code generation in 20+ languages, parallel request support | @@ -116,11 +116,11 @@ ra-aid -m "Your task" --provider openai-compatible --model your-model-name ## Advanced Configuration - + ### Expert Tool Configuration -Configure the expert tool for specialized tasks: +Configure the expert model for specialized tasks; this usually benefits from a more powerful, slower, 
reasoning model: ```bash # DeepSeek expert From 28beca458abc601c0d816a42be74c473cf98dbbf Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 18:03:56 -0500 Subject: [PATCH 21/26] favicon --- assets/favicon.ico | Bin 0 -> 4286 bytes docs/docusaurus.config.ts | 2 +- docs/static/img/favicon.ico | Bin 3626 -> 4286 bytes 3 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 assets/favicon.ico diff --git a/assets/favicon.ico b/assets/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..d933be36fa056555c5ac5eb59c1b612909627e14 GIT binary patch literal 4286 zcmeHKO-pJ~6uoL`exVN3MxwmbH&W~q8i)`&6_X}nq9Uk>;KZ@gqzHlv(}@*|h>D=I z`giNyed1jF4s7rqk9%dGZ9zd$h##j@pivl@1;Hi=!f(+;mk<(-{Ol3; ze)+!a%fUv7qik{wWDUp~_$M@=*Xyyfv$HlAqtOVpS`CB2uo1_9*LHVz8J4d%%}S*b z^?DtLhleZqJsuApA0J_}*-$JNkw_#~;$}Inudk8M=fB0#J1Uh5Z*Ol127^m@olb{V ztA%7T$@b39&M+JfVKSMPu$-Azf zox*OnOL0kmRG&Z~Ao)mdHrp z&1N$ulL@E}voWY1{eB-VmkVyU8-u|BR;zUmGmGE$JM}*4ZI#|Dl?uqJ)hf#6^48o3 z2M4V0sQ2g|X^uo9sMTuhTt1&~Wo)O@iOx6*rGXIZ!Wgqcz`?XZgQ^tN~dA|B(j7qx`++51;#j*Z=?k literal 0 HcmV?d00001 diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts index 7471d1d..f13b89f 100644 --- a/docs/docusaurus.config.ts +++ b/docs/docusaurus.config.ts @@ -56,7 +56,7 @@ const config: Config = { }, footer: { style: 'dark', - copyright: `Copyright © ${new Date().getFullYear()} My Project, Inc. Built with Docusaurus.`, + copyright: `Copyright © ${new Date().getFullYear()} AI Christianson. Built with RA.Aid and Docusaurus.`, }, prism: { theme: prismThemes.github, diff --git a/docs/static/img/favicon.ico b/docs/static/img/favicon.ico index c01d54bcd39a5f853428f3cd5aa0f383d963c484..d933be36fa056555c5ac5eb59c1b612909627e14 100644 GIT binary patch literal 4286 zcmeHKO-pJ~6uoL`exVN3MxwmbH&W~q8i)`&6_X}nq9Uk>;KZ@gqzHlv(}@*|h>D=I z`giNyed1jF4s7rqk9%dGZ9zd$h##j@pivl@1;Hi=!f(+;mk<(-{Ol3; ze)+!a%fUv7qik{wWDUp~_$M@=*Xyyfv$HlAqtOVpS`CB2uo1_9*LHVz8J4d%%}S*b z^?DtLhleZqJsuApA0J_}*-$JNkw_#~;$}Inudk8M=fB0#J1Uh5Z*Ol127^m@olb{V ztA%7T$@b39&M+JfVKSMPu$-Azf zox*OnOL0kmRG&Z~Ao)mdHrp z&1N$ulL@E}voWY1{eB-VmkVyU8-u|BR;zUmGmGE$JM}*4ZI#|Dl?uqJ)hf#6^48o3 z2M4V0sQ2g|X^uo9sMTuhTt1&~Wo)O@iOx6*rGXIZ!Wgqcz`?XZgQ^tN~dA|B(j7qx`++51;#j*Z=?k literal 3626 zcmb`Je@s(X6vrR`EK3%b%orErlDW({vnABqA zcfaS{d+xbU5JKp0*;0YOg+;Fl!eT)XRuapIwFLL`=imZCSon$`se`_<%@MB=M~KG+ z=EW^FL`w|Bo>*ktlaS^(fut!95`iG5u=SZ8nfDHO#GaTlH1-XG^;vsjUb^gWTVz0+ z^=WR1wv9-2oeR=_;fL0H7rNWqAzGtO(D;`~cX(RcN0w2v24Y8)6t`cS^_ghs`_ho? 
z{0ka~1Dgo8TfAP$r*ua?>$_V+kZ!-(TvEJ7O2f;Y#tezt$&R4 zLI}=-y@Z!grf*h3>}DUL{km4R>ya_I5Ag#{h_&?+HpKS!;$x3LC#CqUQ8&nM?X))Q zXAy2?`YL4FbC5CgJu(M&Q|>1st8XXLZ|5MgwgjP$m_2Vt0(J z&Gu7bOlkbGzGm2sh?X`){7w69Y$1#@P@7DF{ZE=4%T0NDS)iH`tiPSKpDNW)zmtn( zw;4$f>k)4$LBc>eBAaTZeCM2(iD+sHlj!qd z2GjRJ>f_Qes(+mnzdA^NH?^NB(^o-%Gmg$c8MNMq&`vm@9Ut;*&$xSD)PKH{wBCEC z4P9%NQ;n2s59ffMn8*5)5AAg4-93gBXBDX`A7S& zH-|%S3Wd%T79fk-e&l`{!?lve8_epXhE{d3Hn$Cg!t=-4D(t$cK~7f&4s?t7wr3ZP z*!SRQ-+tr|e1|hbc__J`k3S!rMy<0PHy&R`v#aJv?`Y?2{avK5sQz%=Us()jcNuZV z*$>auD4cEw>;t`+m>h?f?%VFJZj8D|Y1e_SjxG%J4{-AkFtT2+ZZS5UScS~%;dp!V>)7zi`w(xwSd*FS;Lml=f6hn#jq)2is4nkp+aTrV?)F6N z>DY#SU0IZ;*?Hu%tSj4edd~kYNHMFvS&5}#3-M;mBCOCZL3&;2obdG?qZ>rD|zC|Lu|sny76pn2xl|6sk~Hs{X9{8iBW zwiwgQt+@hi`FYMEhX2 Date: Fri, 14 Feb 2025 19:03:59 -0500 Subject: [PATCH 22/26] update README with links to docs --- README.md | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 7e937c7..e29d8b4 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,20 @@ Here's a demo of RA.Aid adding a feature to itself: RA.Aid Demo +## Documentation + +Complete documentation is available at https://docs.ra-aid.ai + +Key sections: +- [Installation Guide](https://docs.ra-aid.ai/quickstart/installation) +- [Recommended Configuration](https://docs.ra-aid.ai/quickstart/recommended) +- [Open Models Setup](https://docs.ra-aid.ai/quickstart/open-models) +- [Usage Examples](https://docs.ra-aid.ai/category/usage) +- [Contributing Guide](https://docs.ra-aid.ai/contributing) +- [Getting Help](https://docs.ra-aid.ai/getting-help) + +## Table of Contents + - [Features](#features) - [Installation](#installation) - [Usage](#usage) @@ -91,14 +105,7 @@ pip install ra-aid ### Prerequisites -Before using RA.Aid, you'll need: - -1. Python package `aider` installed and available in your PATH: -```bash -pip install aider-chat -``` - -2. API keys for the required AI services: +Before using RA.Aid, you'll need API keys for the required AI services: ```bash # Set up API keys based on your preferred provider: @@ -106,7 +113,7 @@ pip install aider-chat # For Anthropic Claude models (recommended) export ANTHROPIC_API_KEY=your_api_key_here -# For OpenAI models +# For OpenAI models (optional) export OPENAI_API_KEY=your_api_key_here # For OpenRouter provider (optional) @@ -133,6 +140,8 @@ You can get your API keys from: - OpenRouter API key: https://openrouter.ai/keys - Gemini API key: https://aistudio.google.com/app/apikey +Complete installation documentation is available in our [Installation Guide](https://docs.ra-aid.ai/quickstart/installation). + ## Usage RA.Aid is designed to be simple yet powerful. Here's how to use it: @@ -148,6 +157,8 @@ ra-aid -m "Explain the authentication flow" --research-only ra-aid -m "Add new feature" --verbose ``` +More information is available in our [Usage Examples](https://docs.ra-aid.ai/category/usage). + ### Command Line Options - `-m, --message`: The task or query to be executed (required except in chat mode) @@ -426,6 +437,8 @@ Note: For `AIDER_FLAGS`, you can specify flags with or without the leading `--`. - Model configuration is done via command line arguments: `--provider` and `--model` - The `--model` argument is required for all providers except Anthropic (which defaults to `claude-3-5-sonnet-20241022`) +More information is available in our [Open Models Setup](https://docs.ra-aid.ai/quickstart/open-models) guide. 
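+
+As a quick sketch of how these options fit together (the model names are examples only; substitute any provider/model pair you have keys for):
+
+```bash
+# With the Anthropic provider, --model can be omitted (it defaults to claude-3-5-sonnet-20241022)
+ra-aid -m "Your task or query here" --provider anthropic
+
+# Any other provider requires an explicit --model
+ra-aid -m "Your task or query here" --provider openrouter --model deepseek/deepseek-r1
+```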
+ ## Architecture RA.Aid implements a three-stage architecture for handling development and research tasks: From 7624d3c006980296255b769652692e82ebf40f87 Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Fri, 14 Feb 2025 19:08:17 -0500 Subject: [PATCH 23/26] update README --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index e29d8b4..e62e546 100644 --- a/README.md +++ b/README.md @@ -535,6 +535,8 @@ git push origin feature/your-feature-name - Keep commits focused and message clear - Ensure all tests pass before submitting PR +More information is available in our [Contributing Guide](https://docs.ra-aid.ai/contributing). + ## License This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details. From e9d578c2dc83e636a3ceebb6ae44cea1f8559973 Mon Sep 17 00:00:00 2001 From: Ariel Frischer Date: Sat, 15 Feb 2025 03:57:31 -0800 Subject: [PATCH 24/26] chore(dependabot): add dependabot configuration for daily pip updates to automate dependency management (#94) --- .github/dependabot.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..b38df29 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" From 13aa77a389cc6de955627adcb764ac5f80c6585b Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Mon, 17 Feb 2025 14:22:44 -0500 Subject: [PATCH 25/26] add vercel analytics to docs --- docs/docusaurus.config.ts | 4 +++ docs/package-lock.json | 61 +++++++++++++++++++++++++++++++++++++++ docs/package.json | 1 + 3 files changed, 66 insertions(+) diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts index f13b89f..4b3de14 100644 --- a/docs/docusaurus.config.ts +++ b/docs/docusaurus.config.ts @@ -17,6 +17,10 @@ const config: Config = { locales: ['en'], }, + plugins: [ + '@docusaurus/plugin-vercel-analytics' + ], + presets: [ [ 'classic', diff --git a/docs/package-lock.json b/docs/package-lock.json index d3a5f04..248e2d8 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -9,6 +9,7 @@ "version": "0.0.0", "dependencies": { "@docusaurus/core": "3.7.0", + "@docusaurus/plugin-vercel-analytics": "^3.7.0", "@docusaurus/preset-classic": "3.7.0", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", @@ -3529,6 +3530,66 @@ "react-dom": "^18.0.0 || ^19.0.0" } }, + "node_modules/@docusaurus/plugin-vercel-analytics": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-vercel-analytics/-/plugin-vercel-analytics-3.7.0.tgz", + "integrity": "sha512-zEOsqNI3oj4WRO9Dbzsar9fctwAl60PZJqhu14X5W3z5zT/E1TFKrHW/oJHU/a1r5o9K2cFsSNdDn2tyuaFJoQ==", + "license": "MIT", + "dependencies": { + "@docusaurus/core": "3.7.0", + "@docusaurus/logger": "3.7.0", + "@docusaurus/types": "3.7.0", + "@docusaurus/utils": "3.7.0", + "@docusaurus/utils-validation": "3.7.0", + "@vercel/analytics": "^1.1.1", + "tslib": "^2.6.0" + }, + "engines": { + "node": ">=18.0" + }, + "peerDependencies": { + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + } + }, + "node_modules/@docusaurus/plugin-vercel-analytics/node_modules/@vercel/analytics": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@vercel/analytics/-/analytics-1.5.0.tgz", + "integrity": "sha512-MYsBzfPki4gthY5HnYN7jgInhAZ7Ac1cYDoRWFomwGHWEX7odTEzbtg9kf/QSo7XEsEAqlQugA6gJ2WS2DEa3g==", + "license": 
"MPL-2.0", + "peerDependencies": { + "@remix-run/react": "^2", + "@sveltejs/kit": "^1 || ^2", + "next": ">= 13", + "react": "^18 || ^19 || ^19.0.0-rc", + "svelte": ">= 4", + "vue": "^3", + "vue-router": "^4" + }, + "peerDependenciesMeta": { + "@remix-run/react": { + "optional": true + }, + "@sveltejs/kit": { + "optional": true + }, + "next": { + "optional": true + }, + "react": { + "optional": true + }, + "svelte": { + "optional": true + }, + "vue": { + "optional": true + }, + "vue-router": { + "optional": true + } + } + }, "node_modules/@docusaurus/preset-classic": { "version": "3.7.0", "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.7.0.tgz", diff --git a/docs/package.json b/docs/package.json index 32ddd41..8bf5596 100644 --- a/docs/package.json +++ b/docs/package.json @@ -16,6 +16,7 @@ }, "dependencies": { "@docusaurus/core": "3.7.0", + "@docusaurus/plugin-vercel-analytics": "^3.7.0", "@docusaurus/preset-classic": "3.7.0", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", From e8e4dac038c40815a7f0873c34a414617c76b11a Mon Sep 17 00:00:00 2001 From: AI Christianson Date: Mon, 17 Feb 2025 18:11:33 -0500 Subject: [PATCH 26/26] add base latency model param --- ra_aid/models_params.py | 305 +++++++++++++++++++++++----------------- 1 file changed, 174 insertions(+), 131 deletions(-) diff --git a/ra_aid/models_params.py b/ra_aid/models_params.py index 21e04ae..5c80e0c 100644 --- a/ra_aid/models_params.py +++ b/ra_aid/models_params.py @@ -4,278 +4,305 @@ List of model parameters DEFAULT_TOKEN_LIMIT = 100000 DEFAULT_TEMPERATURE = 0.7 +DEFAULT_BASE_LATENCY = 180 models_params = { "openai": { - "gpt-3.5-turbo-0125": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-3.5": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-3.5-turbo": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-3.5-turbo-1106": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-3.5-turbo-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-0125-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-turbo-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-turbo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-turbo-2024-04-09": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-1106-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-vision-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-0613": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-32k": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-32k-0613": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4o": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - 
"gpt-4o-2024-08-06": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4o-2024-05-13": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4o-mini": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "o1-preview": {"token_limit": 128000, "supports_temperature": False}, - "o1-mini": {"token_limit": 128000, "supports_temperature": False}, - "o1-preview": {"token_limit": 128000, "supports_temperature": False}, - "o1": {"token_limit": 200000, "supports_temperature": False}, - "o3-mini": {"token_limit": 200000, "supports_temperature": False}, + "gpt-3.5-turbo-0125": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-3.5": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-3.5-turbo": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-3.5-turbo-1106": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-3.5-turbo-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-0125-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-turbo-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-turbo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-turbo-2024-04-09": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-1106-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-vision-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-0613": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-32k": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-32k-0613": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4o": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4o-2024-08-06": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4o-2024-05-13": {"token_limit": 128000, 
"supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4o-mini": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "o1-preview": {"token_limit": 128000, "supports_temperature": False, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "o1-mini": {"token_limit": 128000, "supports_temperature": False, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "o1-preview": {"token_limit": 128000, "supports_temperature": False, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "o1": {"token_limit": 200000, "supports_temperature": False, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "o3-mini": {"token_limit": 200000, "supports_temperature": False, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, "azure_openai": { - "gpt-3.5-turbo-0125": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-3.5": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-3.5-turbo": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-3.5-turbo-1106": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-3.5-turbo-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-0125-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-turbo-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-turbo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-turbo-2024-04-09": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-1106-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-vision-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-0613": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-32k": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4-32k-0613": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4o": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gpt-4o-mini": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "chatgpt-4o-latest": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "o1-preview": {"token_limit": 128000, "supports_temperature": False}, - "o1-mini": {"token_limit": 128000, "supports_temperature": False}, + "gpt-3.5-turbo-0125": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-3.5": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-3.5-turbo": {"token_limit": 16385, "supports_temperature": True, 
"default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-3.5-turbo-1106": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-3.5-turbo-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-0125-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-turbo-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-turbo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-turbo-2024-04-09": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-1106-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-vision-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-0613": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-32k": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4-32k-0613": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4o": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gpt-4o-mini": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "chatgpt-4o-latest": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "o1-preview": {"token_limit": 128000, "supports_temperature": False, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "o1-mini": {"token_limit": 128000, "supports_temperature": False, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, "google_genai": { - "gemini-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "gemini-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, "gemini-1.5-flash-latest": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, - "gemini-1.5-pro-latest": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "models/embedding-001": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "gemini-1.5-pro-latest": {"token_limit": 128000, 
"supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "models/embedding-001": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, "google_vertexai": { - "gemini-1.5-flash": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gemini-1.5-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gemini-1.0-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "gemini-1.5-flash": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gemini-1.5-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gemini-1.0-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, "ollama": { - "command-r": {"token_limit": 12800, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "codellama": {"token_limit": 16000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "dbrx": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "deepseek-coder:33b": {"token_limit": 16000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "falcon": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama2": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama2:7b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama2:13b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama2:70b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3:8b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3:70b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3.1": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3.1:8b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3.1:70b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "lama3.1:405b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3.2": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3.2:1b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3.2:3b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3.3:70b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "scrapegraph": {"token_limit": 8192, "supports_temperature": True, "default_temperature": 
DEFAULT_TEMPERATURE}, - "mistral-small": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "mistral-openorca": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "mistral-large": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "grok-1": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llava": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "mixtral:8x22b-instruct": {"token_limit": 65536, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "nomic-embed-text": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "nous-hermes2:34b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "orca-mini": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "phi3:3.8b": {"token_limit": 12800, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "phi3:14b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "qwen:0.5b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "qwen:1.8b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "qwen:4b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "qwen:14b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "qwen:32b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "qwen:72b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "qwen:110b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "stablelm-zephyr": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "wizardlm2:8x22b": {"token_limit": 65536, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "mistral": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gemma2": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gemma2:9b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gemma2:27b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "command-r": {"token_limit": 12800, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "codellama": {"token_limit": 16000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "dbrx": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "deepseek-coder:33b": {"token_limit": 16000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "falcon": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": 
DEFAULT_BASE_LATENCY}, + "llama2": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama2:7b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama2:13b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama2:70b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3:8b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3:70b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3.1": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3.1:8b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3.1:70b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "lama3.1:405b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3.2": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3.2:1b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3.2:3b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3.3:70b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "scrapegraph": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "mistral-small": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "mistral-openorca": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "mistral-large": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "grok-1": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llava": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "mixtral:8x22b-instruct": {"token_limit": 65536, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "nomic-embed-text": 
{"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "nous-hermes2:34b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "orca-mini": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "phi3:3.8b": {"token_limit": 12800, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "phi3:14b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "qwen:0.5b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "qwen:1.8b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "qwen:4b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "qwen:14b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "qwen:32b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "qwen:72b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "qwen:110b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "stablelm-zephyr": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "wizardlm2:8x22b": {"token_limit": 65536, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "mistral": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gemma2": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gemma2:9b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gemma2:27b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, # embedding models "shaw/dmeta-embedding-zh-small-q4": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "shaw/dmeta-embedding-zh-q4": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "chevalblanc/acge_text_embedding": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "martcreation/dmeta-embedding-zh": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + 
"latency_coefficient": DEFAULT_BASE_LATENCY, }, - "snowflake-arctic-embed": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "mxbai-embed-large": {"token_limit": 512, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "snowflake-arctic-embed": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "mxbai-embed-large": {"token_limit": 512, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, - "oneapi": {"qwen-turbo": {"token_limit": 6000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}}, + "oneapi": {"qwen-turbo": {"token_limit": 6000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}}, "nvidia": { - "meta/llama3-70b-instruct": {"token_limit": 419, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "meta/llama3-8b-instruct": {"token_limit": 419, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "nemotron-4-340b-instruct": {"token_limit": 1024, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "databricks/dbrx-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "google/codegemma-7b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "google/gemma-2b": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "google/gemma-7b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "google/recurrentgemma-2b": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "meta/codellama-70b": {"token_limit": 16384, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "meta/llama2-70b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "meta/llama3-70b-instruct": {"token_limit": 419, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "meta/llama3-8b-instruct": {"token_limit": 419, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "nemotron-4-340b-instruct": {"token_limit": 1024, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "databricks/dbrx-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "google/codegemma-7b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "google/gemma-2b": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "google/gemma-7b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "google/recurrentgemma-2b": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "meta/codellama-70b": 
{"token_limit": 16384, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "meta/llama2-70b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, "microsoft/phi-3-mini-128k-instruct": { "token_limit": 122880, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "mistralai/mistral-7b-instruct-v0.2": { "token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, - "mistralai/mistral-large": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "mistralai/mistral-large": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, "mistralai/mixtral-8x22b-instruct-v0.1": { "token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "mistralai/mixtral-8x7b-instruct-v0.1": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, - "snowflake/arctic": {"token_limit": 16384, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "snowflake/arctic": {"token_limit": 16384, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, "groq": { - "llama3-8b-8192": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "llama3-70b-8192": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "mixtral-8x7b-32768": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "gemma-7b-it": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "claude-3-haiku-20240307'": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "llama3-8b-8192": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "llama3-70b-8192": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "mixtral-8x7b-32768": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "gemma-7b-it": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "claude-3-haiku-20240307'": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, "toghetherai": { "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "mistralai/Mixtral-8x22B-Instruct-v0.1": { "token_limit": 128000, 
"supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "stabilityai/stable-diffusion-xl-base-1.0": { "token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "NousResearch/Hermes-3-Llama-3.1-405B-Turbo": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "Gryphe/MythoMax-L2-13b-Lite": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, - "Salesforce/Llama-Rank-V1": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "Salesforce/Llama-Rank-V1": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, "meta-llama/Meta-Llama-Guard-3-8B": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "meta-llama/Meta-Llama-3-70B-Instruct-Turbo": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "meta-llama/Llama-3-8b-chat-hf": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "meta-llama/Llama-3-70b-chat-hf": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, "Qwen/Qwen2-72B-Instruct": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY }, - "google/gemma-2-27b-it": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "google/gemma-2-27b-it": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, "anthropic": { - "claude_instant": {"token_limit": 100000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "claude2": {"token_limit": 9000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "claude2.1": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "claude3": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "claude3.5": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "claude-3-opus-20240229": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "claude_instant": {"token_limit": 100000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "claude2": {"token_limit": 9000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "claude2.1": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": 
DEFAULT_BASE_LATENCY}, + "claude3": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "claude3.5": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "claude-3-opus-20240229": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, "claude-3-sonnet-20240229": { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "claude-3-haiku-20240307": { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "claude-3-5-sonnet-20240620": { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "claude-3-5-sonnet-20241022": { "token_limit": 200000, "supports_temperature": True, "default_temperature": 1.0, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "claude-3-5-haiku-latest": { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, }, "bedrock": { @@ -283,93 +310,109 @@ models_params = { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "anthropic.claude-3-sonnet-20240229-v1:0": { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "anthropic.claude-3-opus-20240229-v1:0": { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "anthropic.claude-3-5-sonnet-20240620-v1:0": { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "claude-3-5-haiku-latest": { "token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, - "anthropic.claude-v2:1": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "anthropic.claude-v2": {"token_limit": 100000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "anthropic.claude-v2:1": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "anthropic.claude-v2": {"token_limit": 100000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, "anthropic.claude-instant-v1": { "token_limit": 100000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "meta.llama3-8b-instruct-v1:0": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "meta.llama3-70b-instruct-v1:0": { "token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, - "meta.llama2-13b-chat-v1": {"token_limit": 4096, "supports_temperature": True, 
"default_temperature": DEFAULT_TEMPERATURE}, - "meta.llama2-70b-chat-v1": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "meta.llama2-13b-chat-v1": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "meta.llama2-70b-chat-v1": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, "mistral.mistral-7b-instruct-v0:2": { "token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "mistral.mixtral-8x7b-instruct-v0:1": { "token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "mistral.mistral-large-2402-v1:0": { "token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "mistral.mistral-small-2402-v1:0": { "token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "amazon.titan-embed-text-v1": { "token_limit": 8000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, "amazon.titan-embed-text-v2:0": { "token_limit": 8000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, - "cohere.embed-english-v3": {"token_limit": 512, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "cohere.embed-english-v3": {"token_limit": 512, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, "cohere.embed-multilingual-v3": { "token_limit": 512, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY, }, }, "mistralai": { - "mistral-large-latest": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "open-mistral-nemo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, - "codestral-latest": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}, + "mistral-large-latest": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "open-mistral-nemo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, + "codestral-latest": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, "latency_coefficient": DEFAULT_BASE_LATENCY}, }, "togetherai": { "Meta-Llama-3.1-70B-Instruct-Turbo": { "token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE, + "latency_coefficient": DEFAULT_BASE_LATENCY } }, }