From 1941d4d3c1580f5eaad0b9d0ebb4a6d256ed0bf5 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 3 Oct 2024 12:54:44 -0700 Subject: [PATCH] init commit --- .env.example | 14 + .eslintrc.json | 3 + .gitignore | 38 + .prettierrc | 19 + LICENSE | 21 + README.md | 26 + components.json | 20 + langgraph.json | 9 + next.config.mjs | 4 + package.json | 66 + postcss.config.mjs | 8 + public/lc_logo.jpg | Bin 0 -> 30595 bytes src/agent/index.ts | 315 + src/agent/utils.ts | 44 + src/app/api/[..._path]/route.ts | 65 + src/app/api/store/get/route.ts | 67 + src/app/api/system_rules/get/route.ts | 85 + src/app/api/system_rules/put/route.ts | 82 + src/app/favicon.ico | Bin 0 -> 15406 bytes src/app/globals.css | 103 + src/app/layout.tsx | 22 + src/app/page.tsx | 78 + src/components/AssistantsDropdown.tsx | 155 + src/components/ContentComposer.tsx | 212 + src/components/GeneratedRulesDialog.tsx | 110 + src/components/NewAssistantDialog.tsx | 116 + src/components/Primitives.tsx | 347 ++ src/components/RuleInfoDialog.tsx | 61 + src/components/SystemRulesDialog.tsx | 88 + src/components/WelcomeDialog.tsx | 287 + .../ui/assistant-ui/markdown-text.tsx | 224 + .../ui/assistant-ui/syntax-highlighter.tsx | 24 + .../ui/assistant-ui/tooltip-icon-button.tsx | 53 + src/components/ui/avatar.tsx | 50 + src/components/ui/button.tsx | 57 + src/components/ui/dialog.tsx | 126 + src/components/ui/dropdown-menu.tsx | 205 + src/components/ui/input.tsx | 25 + src/components/ui/progress.tsx | 28 + src/components/ui/select.tsx | 164 + src/components/ui/textarea.tsx | 24 + src/components/ui/toast.tsx | 129 + src/components/ui/toaster.tsx | 35 + src/components/ui/tooltip.tsx | 30 + src/constants.ts | 11 + src/hooks/use-toast.ts | 191 + src/hooks/useGraph.tsx | 182 + src/hooks/useRules.tsx | 132 + src/hooks/useUser.tsx | 29 + src/hooks/utils.ts | 8 + src/lib/convert_messages.ts | 76 + src/lib/cookies.ts | 22 + src/lib/process_event.ts | 143 + src/lib/store.ts | 5 + src/lib/utils.ts | 6 + src/types.ts | 21 + tailwind.config.ts | 74 + tsconfig.json | 26 + yarn.lock | 5115 +++++++++++++++++ 59 files changed, 9680 insertions(+) create mode 100644 .env.example create mode 100644 .eslintrc.json create mode 100644 .gitignore create mode 100644 .prettierrc create mode 100644 LICENSE create mode 100644 README.md create mode 100644 components.json create mode 100644 langgraph.json create mode 100644 next.config.mjs create mode 100644 package.json create mode 100644 postcss.config.mjs create mode 100644 public/lc_logo.jpg create mode 100644 src/agent/index.ts create mode 100644 src/agent/utils.ts create mode 100644 src/app/api/[..._path]/route.ts create mode 100644 src/app/api/store/get/route.ts create mode 100644 src/app/api/system_rules/get/route.ts create mode 100644 src/app/api/system_rules/put/route.ts create mode 100644 src/app/favicon.ico create mode 100644 src/app/globals.css create mode 100644 src/app/layout.tsx create mode 100644 src/app/page.tsx create mode 100644 src/components/AssistantsDropdown.tsx create mode 100644 src/components/ContentComposer.tsx create mode 100644 src/components/GeneratedRulesDialog.tsx create mode 100644 src/components/NewAssistantDialog.tsx create mode 100644 src/components/Primitives.tsx create mode 100644 src/components/RuleInfoDialog.tsx create mode 100644 src/components/SystemRulesDialog.tsx create mode 100644 src/components/WelcomeDialog.tsx create mode 100644 src/components/ui/assistant-ui/markdown-text.tsx create mode 100644 src/components/ui/assistant-ui/syntax-highlighter.tsx create mode 100644 
src/components/ui/assistant-ui/tooltip-icon-button.tsx create mode 100644 src/components/ui/avatar.tsx create mode 100644 src/components/ui/button.tsx create mode 100644 src/components/ui/dialog.tsx create mode 100644 src/components/ui/dropdown-menu.tsx create mode 100644 src/components/ui/input.tsx create mode 100644 src/components/ui/progress.tsx create mode 100644 src/components/ui/select.tsx create mode 100644 src/components/ui/textarea.tsx create mode 100644 src/components/ui/toast.tsx create mode 100644 src/components/ui/toaster.tsx create mode 100644 src/components/ui/tooltip.tsx create mode 100644 src/constants.ts create mode 100644 src/hooks/use-toast.ts create mode 100644 src/hooks/useGraph.tsx create mode 100644 src/hooks/useRules.tsx create mode 100644 src/hooks/useUser.tsx create mode 100644 src/hooks/utils.ts create mode 100644 src/lib/convert_messages.ts create mode 100644 src/lib/cookies.ts create mode 100644 src/lib/process_event.ts create mode 100644 src/lib/store.ts create mode 100644 src/lib/utils.ts create mode 100644 src/types.ts create mode 100644 tailwind.config.ts create mode 100644 tsconfig.json create mode 100644 yarn.lock diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..4733bcdf --- /dev/null +++ b/.env.example @@ -0,0 +1,14 @@ +# LangSmith tracing +LANGCHAIN_TRACING_V2=true +LANGCHAIN_API_KEY= + +# LLM API keys +ANTHROPIC_API_KEY= + +# Vercel KV stores. Used for system prompt storage. +KV_REST_API_URL= +KV_REST_API_TOKEN= + +# LangGraph Deployment +LANGGRAPH_API_URL= +NEXT_PUBLIC_LANGGRAPH_GRAPH_ID="agent" diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 00000000..bffb357a --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "next/core-web-vitals" +} diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..55ec8d18 --- /dev/null +++ b/.gitignore @@ -0,0 +1,38 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js +.yarn/install-state.gz + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env*.local +.env + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts + diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..ba08ff04 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,19 @@ +{ + "$schema": "https://json.schemastore.org/prettierrc", + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "semi": true, + "singleQuote": false, + "quoteProps": "as-needed", + "jsxSingleQuote": false, + "trailingComma": "es5", + "bracketSpacing": true, + "arrowParens": "always", + "requirePragma": false, + "insertPragma": false, + "proseWrap": "preserve", + "htmlWhitespaceSensitivity": "css", + "vueIndentScriptAndStyle": false, + "endOfLine": "lf" +} diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..5d8d7cda --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) LangChain, Inc. 
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..fb3deca6
--- /dev/null
+++ b/README.md
@@ -0,0 +1,26 @@
+# Streaming Messages
+
+This project contains a Next.js app with API routes that hit a LangGraph Cloud deployment and demonstrate exactly how the different streaming types work.
+To change the streaming type, click on the settings (⚙️) icon in the top right corner of the app and select the desired streaming type.
+
+## [YouTube Video](https://youtu.be/wjn5tFbLgwA)
+
+## Setup
+
+To set up the project, install the dependencies:
+
+```bash
+yarn install
+```
+
+## Environment variables
+
+The streaming messages project only requires your LangChain API key, the LangGraph Cloud deployment URL, and the name of your graph.
+ +Once you have these, create a `.env` file in this directory and add the following: + +```bash +LANGGRAPH_API_URL=http://localhost:8123 # Or your production URL +LANGCHAIN_API_KEY=YOUR_API_KEY +NEXT_PUBLIC_LANGGRAPH_GRAPH_ID=YOUR_GRAPH_ID +``` diff --git a/components.json b/components.json new file mode 100644 index 00000000..42f059b5 --- /dev/null +++ b/components.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": true, + "tsx": true, + "tailwind": { + "config": "tailwind.config.ts", + "css": "src/app/globals.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + } +} \ No newline at end of file diff --git a/langgraph.json b/langgraph.json new file mode 100644 index 00000000..c7521d63 --- /dev/null +++ b/langgraph.json @@ -0,0 +1,9 @@ +{ + "node_version": "20", + "dockerfile_lines": [], + "dependencies": ["."], + "graphs": { + "agent": "./src/agent/index.ts:buildGraph" + }, + "env": ".env" +} diff --git a/next.config.mjs b/next.config.mjs new file mode 100644 index 00000000..4678774e --- /dev/null +++ b/next.config.mjs @@ -0,0 +1,4 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = {}; + +export default nextConfig; diff --git a/package.json b/package.json new file mode 100644 index 00000000..3f37e55f --- /dev/null +++ b/package.json @@ -0,0 +1,66 @@ +{ + "name": "streaming_chat_frontend", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint", + "format": "prettier --config .prettierrc --write \"src\"" + }, + "dependencies": { + "@assistant-ui/react": "^0.5.63", + "@assistant-ui/react-langgraph": "^0.0.5", + "@assistant-ui/react-markdown": "^0.2.12", + "@assistant-ui/react-syntax-highlighter": "^0.0.11", + "@langchain/anthropic": "^0.3.1", + "@langchain/core": "^0.3.3", + "@langchain/langgraph": "^0.2.10", + "@langchain/langgraph-sdk": "^0.0.14", + "@radix-ui/react-avatar": "^1.1.0", + "@radix-ui/react-dialog": "^1.1.1", + "@radix-ui/react-dropdown-menu": "^2.1.1", + "@radix-ui/react-icons": "^1.3.0", + "@radix-ui/react-progress": "^1.1.0", + "@radix-ui/react-select": "^2.1.1", + "@radix-ui/react-slot": "^1.1.0", + "@radix-ui/react-toast": "^1.2.1", + "@radix-ui/react-tooltip": "^1.1.2", + "@supabase/supabase-js": "^2.45.4", + "@types/react-syntax-highlighter": "^15.5.13", + "@vercel/kv": "^2.0.0", + "class-variance-authority": "^0.7.0", + "clsx": "^2.1.1", + "js-cookie": "^3.0.5", + "lucide-react": "^0.441.0", + "next": "14.2.7", + "react": "^18", + "react-dom": "^18", + "react-json-view": "^1.21.3", + "react-markdown": "^9.0.1", + "react-syntax-highlighter": "^15.5.0", + "rehype-katex": "^7.0.1", + "remark-gfm": "^4.0.0", + "remark-math": "^6.0.0", + "tailwind-merge": "^2.5.2", + "tailwind-scrollbar-hide": "^1.1.7", + "tailwindcss-animate": "^1.0.7", + "uuid": "^10.0.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@types/js-cookie": "^3.0.6", + "@types/node": "^20", + "@types/react": "^18", + "@types/react-dom": "^18", + "@types/uuid": "^10.0.0", + "eslint": "^8", + "eslint-config-next": "14.2.7", + "postcss": "^8", + "prettier": "^3.3.3", + "tailwind-scrollbar": "^3.1.0", + "tailwindcss": "^3.4.1", + "typescript": "^5" + } +} diff --git a/postcss.config.mjs b/postcss.config.mjs new file mode 100644 index 00000000..1a69fd2a --- /dev/null +++ 
b/postcss.config.mjs
@@ -0,0 +1,8 @@
+/** @type {import('postcss-load-config').Config} */
+const config = {
+  plugins: {
+    tailwindcss: {},
+  },
+};
+
+export default config;
diff --git a/public/lc_logo.jpg b/public/lc_logo.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..deda5068762128b4ca7cfb01607ae725bffaa900
GIT binary patch
literal 30595
(binary image data omitted)
zB?ae2OH7!wQE<^=Z~xf9W9MaoleIte=6Grm33jnDlft|&3T=&VCYiqMi&C-&*@r$I%8=+Lk4|MGLLK%fXje7 z_t9PN3VwV{IWl|C0F3DNZg8IWi3CxLCOg+S!xFE%RRL{|CwqL!os+L42Hdcg>7FphP8B|+N!u{AA6T{k4e?RkEe@ajhZLhyrq^V ztc%(E_{ulPJJ4NX4yuCTzB5&LQ%(Oj3<@H!_^+kSP1^Iha~ZB-=b=pl*Je1v`^SsJ zeOh#sSv2I^CeMcUK{uX!yS-~$Y*2+sRpew&i||oksDS^L<_i>4@RqY!AL9D6=o$tkT4Nl4 ze7|^xP0zmi8`JOE<|B;y)}zq^8|iF=*Fla3Yo+~eX!x>jmBxkqfn;o62r;axq!N&1 z8$r9Fv@rGq!m8Ra!o}85tivfoDP?oK@}Wxj!j#e0%Bd8}SZ6LVpma|4^ES-_ah73z z6K{{M6^Ed)x6x+eGpvzSl;{OSk9mzb)d2??!|`erNnZ*FgdtG zG5*P0vR5t{n(eSNKNX#lx-E3)mm)r)yEr&K8ypnhri2xS50|Lt&B#BAm9uD+J9N3r z{P(rEU&2EZAEpW#0(o!WrlbY-fJU_Pk;)=UqaNLT_u}9O%3?O67-7mVk zH+3IDB<}d2nXsiLk$}gQ|M%DEGt(uSL5Xsq?0^s6|WW z7ha>=zHr#OA$KB4c-f!87mX+fw}ERqp;#b?4hVmEl_R#=_|eTO`}%dMOL=FHCl3%X z-KLe&)U@(e4$irD?jpZDXu#j6tq#AJ69X^Tq*n*!H@4wj_~5(Q@`RaZwtj&OAPG)h zIF)#6DG*I$jPpCD4sP!IOC!RI0#x+OpfkhP3v??Yz{5JJkfr1M-`KH=qXGH~FM=}m z7Q8OI;aXe=D@AxUt*xRXeyKw(Ra7TayC|gSr%6tX_N!(aQ6av;Uh4`NwqoxnS;exAL7LZ zv|`6zwXbPrkc)npIkI}ztbO7t95e8ofguqBW6RLXbA$tT5j(`S!^}D^Am;I_nf-4z zt9B5*@y$EBt-jZ=Us#?}*e`dimoRB{F)W^%K_@k^hCvR%4z5GqD4vG= zG2(x<{B&#M?ri;xWBSh`zlC-c zjOBG`0l8m#aMu8G4GX|lfgnDpbvMoZiEN{G?(0^D&1;j%6`XIA)8Q&H3CvVcZwY+M zgz)T)`vxo%o%XL*3q z|Kkw~0OeoFL6s8MpC9_~HizkKoCp;irr8^dJ4TCVPa$Al9CW%$020up^m)#tAqZo& zF*{r6n+tn78<1mQMsf}dSz;t{tIom8bKq3o0v=cvej6Wiw- zw%lQs9$TRPY&oA%?o4-jw$857er6l+5}alG@{hs%n`N&#ZC%0U^=5u-CiMV!*aUZp zG7yJ8*^^0)?pOebv=ux;GzG(W9zRu1?M%7-$US(Z0pOdcQ*{3-755`T_cic+$I7BIOgtkp zVzev^Vk4__^<ZAmm} zvQCI>`ec(;v>hYuGEqHYU(;DOI;H(4I2?+N7pqQI5G=wiwZXe~**{4BCh9DCv+Ho; z(`VSV^GYnvfot}4v<^3k{kHy1eI*=4r+ZS>)$$Gr<}@zfHPtSVUKtWYmT>+6 ze^MS@izxeMLtD{PWi_AY0Ery9jh^n4@z?RG;T_Or($1D{ z`>j{k^InygG5Tm8+xr*ETlJRy_x=l8;0NFq7al0KE(081Wqv-}!2I^1t^aGU{#Tp* zKc5|1-M&i33J+tKC5iD|ZySsHwCIHl06I@}5I`nkQfp+{F)c7PGw5D{&zm1x0We$( z9P1ZopA@xF0iVW~XPH(SkjHBNa*f^Qm);<>rGiN3AJDwJZbmPy6B%HlJMg9 zax>4{21QPsq0@96q^h;ZIcbBdLORWvU?>|G zop@ye4xHhp@tg)a3|kW#mm@I3W!LEP7=ckA2rQm|a&*13XZ_5a@@}Y;g?Tcs;?)M6 zO;_8ImLlP?0u#YNCZdexY3x!(wTElLZ}(-1JJz$9aZ?G~BmydUS7@ML5HR$0I(Cs; zY|vJ9ay1~QBV{OW`+~&pE@P)6$=P2eOkMdt=J3Z&`49DG?2%wF)_xbIm|&ZGNhS_n z4|PI|A{_jHB<0yRzgpfCEpqkr^}zhYv*V?x19|tJ=Ug+C*LX=25HujXzy-O5r0p-I zN_T`PkM2pLDX*>yW4$cQ$mShAxtcWp$@JM0-MXIonHKFKl>2D#%6qx7vR>d~=5<){ zz|PD|a03;WdS`+s)7HDgTdS*h!wpo!x~rsueb0VjL#PDfZ){0y;mX z<(7{6r{H!1wt?)ur^OriQ~R}DO-^YmLU(CPf+7%0sqDp<7BEYfC!JwF;S1TNyZ0t! ztmVwgROzN`%h!wkx0m*!qRGXWO^+mAa0$~2t+%we)2fso__m3Zv07(tbi67VZ)&c6iVP()*yoLim{Y-gZE%zszT}TCi*y8YxNF)It&;%IzI{FIQ#tas6SN z$F~-gq&s#6zmttz*9D&lVQ;3@{?@r=_fffY>D{_awY@6|vLyCU?Cqc2|4TyUuJR`v z#s|+oEqkj{TT~{m=Mw zz)=~Oe2_GQ13#h9@zpwZYoJnVhQu%M0A9sUkqiqHVR{KKlJ?@=1r!d$>yZI2M#R-~ z6;;i~J6(hoOB2-avM**2PK1}P@LfQBE88!c_Vadv>ae?@EqoT&q%*h)8=F5CVpxD` zKaej})!FBu?GuKN!MN|`&J;ZvUiV, +}); + +const GraphConfig = Annotation.Root({ + /** + * The system rules to always include when generating responses. + * This is editable by the user. + */ + systemRules: Annotation, + /** + * Whether or not the user has accepted the text generated by the AI. + * If this is true, the graph will route to a node which generates rules. + */ + hasAcceptedText: Annotation, +}); + +const RULES_PROMPT = `The user has defined two sets of rules. The first set is for style guidelines, and the second set is for content guidelines. + + +{styleRules} + + + +{contentRules} +`; + +const SYSTEM_PROMPT = `You are a helpful assistant tasked with thoughtfully fulfilling the requests of the user. 
+
+System rules:
+
+<systemRules>
+{systemRules}
+</systemRules>
+
+{rulesPrompt}`;
+
+const callModel = async (
+  state: typeof GraphAnnotation.State,
+  config: LangGraphRunnableConfig
+) => {
+  const model = new ChatAnthropic({
+    model: "claude-3-5-sonnet-20240620",
+    temperature: 0,
+  });
+
+  const { styleRules, contentRules } = await getRulesFromStore(config);
+
+  const styleRulesString = styleRules ? `- ${styleRules.join("\n - ")}` : null;
+  const contentRulesString = contentRules
+    ? `- ${contentRules.join("\n - ")}`
+    : null;
+
+  let systemPrompt = SYSTEM_PROMPT.replace(
+    "{systemRules}",
+    config?.configurable?.systemRules ?? DEFAULT_SYSTEM_RULES_STRING
+  );
+  if (styleRulesString || contentRulesString) {
+    systemPrompt = systemPrompt
+      .replace("{rulesPrompt}", RULES_PROMPT)
+      .replace("{styleRules}", styleRulesString || DEFAULT_RULES_STRING)
+      .replace("{contentRules}", contentRulesString || DEFAULT_RULES_STRING);
+  } else {
+    systemPrompt = systemPrompt.replace("{rulesPrompt}", "");
+  }
+
+  const response = await model.invoke(
+    [
+      {
+        role: "system",
+        content: systemPrompt,
+      },
+      ...state.messages,
+    ],
+    config
+  );
+  return { messages: [response] };
+};
+
+const _prepareConversation = (messages: BaseMessage[]): string => {
+  return messages
+    .map((msg, i) => {
+      if (typeof msg.content !== "string") return "";
+      return `<${msg._getType()}_message index={${i}}>\n${msg.content}\n</${msg._getType()}_message>`;
+    })
+    .join("\n\n");
+};
+
+/**
+ * This node generates insights based on the changes or follow-up messages
+ * that have been made by the user. It does the following:
+ * 1. Sets a system message describing the task, the existing user rules, and how messages in the history can be formatted (e.g. an AI Message, followed by a human message that is prefixed with "REVISED MESSAGE").
+ * 2. Passes the entire history to the LLM.
+ * 3. Uses `withStructuredOutput` to generate structured rules based on the conversation or revisions.
+ * 4. Updates the `userRules` shared value with the new rules.
+ *
+ * The LLM will always re-generate the entire rules list, so it is important to pass the entire history to the model.
+ * @param state The current state of the graph
+ */
+const generateInsights = async (
+  state: typeof GraphAnnotation.State,
+  config: LangGraphRunnableConfig
+) => {
+  const systemPrompt = `This conversation contains back and forth between an AI assistant and a user who is using the assistant to generate text.
+
+User messages which are prefixed with "REVISED MESSAGE" contain the entire revised text the user made to the assistant message directly before in the conversation.
+Revisions are made directly by users, so you should pay VERY close attention to every single change made, no matter how small. These should be heavily considered when generating rules.
+
+Important aspects of revisions to consider:
+- Deletions: What did the user remove? Do you need a rule to avoid adding this in the future?
+- Tone: Did they change the overall tone? Do you need a rule to ensure this tone is maintained?
+- Structure: Did they change the structure of the text? This is important to remember, as it may be a common pattern.
+
+There may also be additional back and forth between the user and the assistant.
+
+Based on the conversation, and paying particular attention to any changes made in the "REVISED MESSAGE", your job is to create a list of rules to use in the future to help the AI assistant better generate text.
+
+These rules should be split into two categories:
+1.
Style guidelines: These rules should focus on the style, tone, and structure of the text. +2. Content guidelines: These rules should focus on the content, context, and purpose of the text. Think of this as the business logic or domain-specific rules. + +In your response, include every single rule you want the AI assistant to follow in the future. You should list rules based on a combination of the existing conversation as well as previous rules. +You can modify previous rules if you think the new conversation has helpful information, or you can delete old rules if they don't seem relevant, or you can add new rules based on the conversation. + +Refrain from adding overly generic rules like "follow instructions". These generic rules are already outlined in the "system_rules" below. +Instead, focus your attention on specific details, writing style, or other aspects of the conversation that you think are important for the AI to follow. + +The user has defined the following rules: + + +{styleRules} + + + +{contentRules} + + +Here is the conversation: + + +{conversation} + + +And here are the default system rules: + + +{systemRules} + + +Respond with updated rules to keep in mind for future conversations. Try to keep the rules you list high signal-to-noise - don't include unnecessary ones, but make sure the ones you do add are descriptive. Combine ones that seem similar and/or contradictory`; + + const { styleRules, contentRules } = await getRulesFromStore(config); + + const styleRulesString = styleRules + ? `- ${styleRules.join("\n - ")}` + : DEFAULT_RULES_STRING; + const contentRulesString = contentRules + ? `- ${contentRules.join("\n - ")}` + : DEFAULT_RULES_STRING; + + const prompt = systemPrompt + .replace( + "{systemRules}", + config.configurable?.systemRules ?? DEFAULT_SYSTEM_RULES_STRING + ) + .replace("{styleRules}", styleRulesString) + .replace("{contentRules}", contentRulesString) + .replace("{conversation}", _prepareConversation(state.messages)); + + const userRulesSchema = z.object({ + contentRules: z + .array(z.string()) + .describe( + "List of rules focusing on content, context, and purpose of the text" + ), + styleRules: z + .array(z.string()) + .describe( + "List of rules focusing on style, tone, and structure of the text" + ), + }); + + const modelWithStructuredOutput = new ChatAnthropic({ + model: "claude-3-5-sonnet-20240620", + temperature: 0, + }).withStructuredOutput(userRulesSchema, { name: "userRules" }); + + const result = await modelWithStructuredOutput.invoke( + [ + { + role: "user", + content: prompt, + }, + ], + config + ); + + await putRulesInStore(config, result); + + return { + userAcceptedText: false, + }; +}; + +const wasContentGenerated = async (state: typeof GraphAnnotation.State) => { + const { messages } = state; + + const prompt = `The following is the ending of a conversation between a user and an AI assistant. The assistant has been tasked with generating writing content (e.g a blog post, or tweet) for the user. +Carefully read the conversation and determine whether or not writing content was generated by the assistant, in the most recent assistant message. + +In some cases, the assistant is responding to a question and did not generate any content. +In other cases the assistant is generating content, or revising content. + +If the assistant is discussing content, but no actual content was generated, or included in the response, set 'contentGenerated' to false. +If the assistant generated content, set 'contentGenerated' to true. 
+ + +{conversation} +`; + const schema = z.object({ + contentGenerated: z + .boolean() + .describe( + "Whether or not content (e.g a tweet, or blog post) was generated in the most recent assistant message." + ), + }); + + const formattedPrompt = (() => { + // Find the index of the last human message + const lastHumanMessageIndex = messages.findLastIndex( + (msg) => msg._getType() === "human" + ); + + // If a human message is found, slice the array from that index + const slicedMessages = + lastHumanMessageIndex !== -1 + ? messages.slice(lastHumanMessageIndex) + : messages; + + // Prepare the conversation with the sliced messages + return prompt.replace( + "{conversation}", + _prepareConversation(slicedMessages) + ); + })(); + + const model = new ChatAnthropic({ + model: "claude-3-haiku-20240307", + temperature: 0, + }).withStructuredOutput(schema, { name: "was_content_generated" }); + + return model.invoke([ + { + role: "user", + content: formattedPrompt, + }, + ]); +}; + +/** + * Conditional edge which is always called first. This edge + * determines whether or not revisions have been made, and if so, + * generate insights to then set under user rules. + * @param {typeof GraphAnnotation.State} state The current state of the graph + */ +const shouldGenerateInsights = ( + _state: typeof GraphAnnotation.State, + config?: RunnableConfig +) => { + const { hasAcceptedText = false } = config?.configurable ?? {}; + + if (hasAcceptedText) { + return "generateInsights"; + } + return "callModel"; +}; + +export function buildGraph() { + const workflow = new StateGraph(GraphAnnotation, GraphConfig) + .addNode("callModel", callModel) + .addNode("generateInsights", generateInsights) + .addNode("wasContentGenerated", wasContentGenerated) + // Always start by checking whether or not to generate insights + .addConditionalEdges(START, shouldGenerateInsights) + // Always check if content was generated after calling the model + .addEdge("callModel", "wasContentGenerated") + // No further action by the graph is necessary after either + // generating a response via `callModel`, or rules via `generateInsights`. + .addEdge("generateInsights", END) + .addEdge("wasContentGenerated", END); + + return workflow.compile(); +} diff --git a/src/agent/utils.ts b/src/agent/utils.ts new file mode 100644 index 00000000..1599d473 --- /dev/null +++ b/src/agent/utils.ts @@ -0,0 +1,44 @@ +import { createNamespace, USER_RULES_STORE_KEY } from "../lib/store"; +import { UserRules } from "../types"; +import { BaseStore, LangGraphRunnableConfig } from "@langchain/langgraph"; + +const validateStore = (config: LangGraphRunnableConfig): BaseStore => { + if (!config.store) { + throw new Error("Store not found in config."); + } + return config.store; +}; + +export const getRulesFromStore = async ( + config: LangGraphRunnableConfig +): Promise => { + const store = validateStore(config); + const assistantId = config.configurable?.assistant_id; + + if (!assistantId) { + throw new Error("Assistant ID not found in config."); + } + + const namespace = createNamespace(assistantId); + const rules = await store.get(namespace, USER_RULES_STORE_KEY); + + return { + styleRules: rules?.value?.styleRules ?? null, + contentRules: rules?.value?.contentRules ?? 
null, + }; +}; + +export const putRulesInStore = async ( + config: LangGraphRunnableConfig, + rules: UserRules +): Promise => { + const store = validateStore(config); + const assistantId = config.configurable?.assistant_id; + + if (!assistantId) { + throw new Error("Assistant ID not found in config."); + } + + const namespace = createNamespace(assistantId); + await store.put(namespace, USER_RULES_STORE_KEY, rules); +}; diff --git a/src/app/api/[..._path]/route.ts b/src/app/api/[..._path]/route.ts new file mode 100644 index 00000000..7243ee07 --- /dev/null +++ b/src/app/api/[..._path]/route.ts @@ -0,0 +1,65 @@ +import { NextRequest, NextResponse } from "next/server"; + +function getCorsHeaders() { + return { + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "GET, POST, PUT, PATCH, DELETE, OPTIONS", + "Access-Control-Allow-Headers": "*", + }; +} + +async function handleRequest(req: NextRequest, method: string) { + try { + const path = req.nextUrl.pathname.replace(/^\/?api\//, ""); + const url = new URL(req.url); + const searchParams = new URLSearchParams(url.search); + searchParams.delete("_path"); + searchParams.delete("nxtP_path"); + const queryString = searchParams.toString() + ? `?${searchParams.toString()}` + : ""; + + const options: RequestInit = { + method, + headers: { + "x-api-key": process.env.LANGCHAIN_API_KEY || "", + }, + }; + + if (["POST", "PUT", "PATCH"].includes(method)) { + options.body = await req.text(); + } + + const res = await fetch( + `${process.env.LANGGRAPH_API_URL}/${path}${queryString}`, + options + ); + + return new NextResponse(res.body, { + status: res.status, + statusText: res.statusText, + headers: { + ...res.headers, + ...getCorsHeaders(), + }, + }); + } catch (e: any) { + return NextResponse.json({ error: e.message }, { status: e.status ?? 
500 }); + } +} + +export const GET = (req: NextRequest) => handleRequest(req, "GET"); +export const POST = (req: NextRequest) => handleRequest(req, "POST"); +export const PUT = (req: NextRequest) => handleRequest(req, "PUT"); +export const PATCH = (req: NextRequest) => handleRequest(req, "PATCH"); +export const DELETE = (req: NextRequest) => handleRequest(req, "DELETE"); + +// Add a new OPTIONS handler +export const OPTIONS = () => { + return new NextResponse(null, { + status: 204, + headers: { + ...getCorsHeaders(), + }, + }); +}; diff --git a/src/app/api/store/get/route.ts b/src/app/api/store/get/route.ts new file mode 100644 index 00000000..042e3708 --- /dev/null +++ b/src/app/api/store/get/route.ts @@ -0,0 +1,67 @@ +import { Client } from "@langchain/langgraph-sdk"; +import { NextRequest, NextResponse } from "next/server"; + +export async function GET(req: NextRequest) { + if (!process.env.LANGGRAPH_API_URL || !process.env.LANGCHAIN_API_KEY) { + return new NextResponse( + JSON.stringify({ + error: "LANGGRAPH_API_URL and LANGCHAIN_API_KEY must be set", + }), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } + + const searchParams = req.nextUrl.searchParams; + const namespaceParam = searchParams.get("namespace"); + const key = searchParams.get("key"); + + if (!namespaceParam || !key) { + return new NextResponse( + JSON.stringify({ error: "Missing namespace or key" }), + { + status: 400, + headers: { "Content-Type": "application/json" }, + } + ); + } + + // Parse the namespace from URL-encoded string to an array of strings + const namespace: string = decodeURIComponent(namespaceParam); + const namespaceArr: string[] = namespace.split("."); + + if (!Array.isArray(namespaceArr)) { + return new NextResponse( + JSON.stringify({ error: "Invalid namespace format" }), + { + status: 400, + headers: { "Content-Type": "application/json" }, + } + ); + } + + const lgClient = new Client({ + apiKey: process.env.LANGCHAIN_API_KEY, + apiUrl: process.env.LANGGRAPH_API_URL, + }); + + try { + const result = await lgClient.store.getItem(namespaceArr, key); + + return new NextResponse(JSON.stringify(result ?? 
{}), { + headers: { "Content-Type": "application/json" }, + }); + } catch (e) { + console.error("Err fetching store"); + console.error(e); + return new NextResponse( + JSON.stringify({ error: "Failed to get item from store" }), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } +} diff --git a/src/app/api/system_rules/get/route.ts b/src/app/api/system_rules/get/route.ts new file mode 100644 index 00000000..b8bf2f9f --- /dev/null +++ b/src/app/api/system_rules/get/route.ts @@ -0,0 +1,85 @@ +import { createClient } from "@supabase/supabase-js"; +import { NextRequest, NextResponse } from "next/server"; + +export async function GET(req: NextRequest) { + if ( + !process.env.SUPABASE_SERVICE_ROLE_KEY || + !process.env.NEXT_PUBLIC_SUPABASE_URL + ) { + return new NextResponse( + JSON.stringify({ + error: + "SUPABASE_SERVICE_ROLE_KEY and NEXT_PUBLIC_SUPABASE_URL must be set", + }), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } + + // Initialize Supabase client + const supabase = createClient( + process.env.NEXT_PUBLIC_SUPABASE_URL, + process.env.SUPABASE_SERVICE_ROLE_KEY + ); + + const searchParams = req.nextUrl.searchParams; + const userId = searchParams.get("userId"); + const assistantId = searchParams.get("assistantId"); + + if (!userId || !assistantId) { + return new NextResponse( + JSON.stringify({ error: "Missing userId or assistantId" }), + { + status: 400, + headers: { "Content-Type": "application/json" }, + } + ); + } + + try { + // Fetch the latest system rules + const { data, error } = await supabase + .from("user_rules") + .select("system_rules") + .eq("user_id", userId) + .eq("assistant_id", assistantId) + .limit(1) + .single(); + + if (error) { + console.error("Error getting system rules:", { + error, + }); + return new NextResponse( + JSON.stringify({ error: "Failed to get system rules." 
}), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } + + if (!data) { + return new NextResponse(JSON.stringify({ error: "No rules found" }), { + status: 404, + headers: { "Content-Type": "application/json" }, + }); + } + + return new NextResponse(JSON.stringify(data), { + status: 200, + headers: { "Content-Type": "application/json" }, + }); + } catch (error) { + console.error("Error fetching system rules:", error); + return new NextResponse( + JSON.stringify({ error: "Failed to fetch system rules" }), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } +} diff --git a/src/app/api/system_rules/put/route.ts b/src/app/api/system_rules/put/route.ts new file mode 100644 index 00000000..06dbfa20 --- /dev/null +++ b/src/app/api/system_rules/put/route.ts @@ -0,0 +1,82 @@ +import { NextRequest, NextResponse } from "next/server"; +import { createClient } from "@supabase/supabase-js"; + +export async function POST(req: NextRequest) { + if ( + !process.env.SUPABASE_SERVICE_ROLE_KEY || + !process.env.NEXT_PUBLIC_SUPABASE_URL + ) { + return new NextResponse( + JSON.stringify({ + error: + "SUPABASE_SERVICE_ROLE_KEY and NEXT_PUBLIC_SUPABASE_URL must be set", + }), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } + + // Initialize Supabase client + const supabase = createClient( + process.env.NEXT_PUBLIC_SUPABASE_URL, + process.env.SUPABASE_SERVICE_ROLE_KEY + ); + + const { assistantId, userId, systemRules } = await req.json(); + + if (!userId || !assistantId || !systemRules) { + return new NextResponse( + JSON.stringify({ + error: "Missing userId, assistantId, or an array of systemRules.", + }), + { + status: 400, + headers: { "Content-Type": "application/json" }, + } + ); + } + + try { + // Insert new row into user_rules table + const { data, error } = await supabase + .from("user_rules") + .upsert( + { + user_id: userId, + assistant_id: assistantId, + system_rules: systemRules, + }, + { onConflict: "user_id,assistant_id", ignoreDuplicates: false } + ) + .select(); + + if (error) { + console.error("Error inserting system rules:", { + error, + }); + return new NextResponse( + JSON.stringify({ error: "Failed to insert system rules." }), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } + + return new NextResponse(JSON.stringify(data), { + status: 200, + headers: { "Content-Type": "application/json" }, + }); + } catch (error) { + console.error("Error inserting system rules:", error); + return new NextResponse( + JSON.stringify({ error: "Failed to insert system rules." 
}), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } +} diff --git a/src/app/favicon.ico b/src/app/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..18bbd3ff2b6ddd61abf528969f51aac28a400c0f GIT binary patch literal 15406 zcmeHN>r)g*5Z_et2bd4}rU}MWv=U9M#5~mF;e+?X6BGngL?MX5hy{XZAV@%wh=Pd9 zLsSqDltTpMI1WV=M6r^pJgSmtsfn7DnwY5h-szrY*1Nquj=M9K73Zp^=5}_v`!_v3 zGrip-NiRq*O5?^!SbIpTUy`H`B}ww|ke{c#EJ-!kHhucY`!^+N@D)j#f_;z#Ddcvn zG=H;o<3SX)4kmz3XIn^lYPHJ&!xYi=i*;-P5(zqJHE@#F9hnV=9QU~ z1HG3Yu~JcQgf*#aAiuaA4z~;--|eu!u1k5JnVn-p-`M`O^8T~*RQ7&%^>Nr%QVF$& z3lNtO1^W)Svp&>UEuYhyhW>=P^Cm-TW;*Lj<5;(0J#5M=fP$TSaeTjk7JbGD{`DI( zASyNp%)<{9JmyOeAg`ztHh;Mlx`*xq^;OF!oqUeEA^s)}-H{GQ)1NiJ3CLfosOMQc zYiuXalzAzLqb*+n&T6Caq0`$_5jhI{!P+ z`P)6lLVby*3TcUlvsk?6OoFl+gK|!D|AT5Xp0}3IWBFfpzyQQ^tT=Y;t+kO+S>Mh4 zp4`Hn%x7pZ{RF!z8W|q)4L0U(f%MIrfZDY%?I^Dsll;h-0O%clsLA82^24wo+!K~8 z4S}$QvtesV1?v|Zr-v`L6+>ZZ6}Ic3sp~sidgK`y9pDfH`BGZ=JI>yM@W^=zePHnW z%$Fpe$k%@wm@mti$*2pNBC0ENDM_Q^7D6`D*=>pL??J26%aUFzgA*am)`@&yjKPE+ns84GGEd z`2IeVNzb$fdEc?cJO=xtIk8-Sow$#Lq8@4emLF%eStzzFS6Xdlt`7fysJKg+7En zo^Knl7Z-S5+9^(gAzEn#77-_TzSn}-3&s2k8xS@iY(Ut6umNEM{|_6GV-!3t?oRzQ z^x-q-4bVq;Lso7c)MAdE;;zPlyU;aw$E`viz*7ZH!x2V9F>`_R|+6XD@Q&_zG#OZ6$cKSMt)ze)F!q_!%hsDizVf?nHxeq$} zZ`sV}=?x55f23S0pavzj? zp*VTxx!aJoDU0bdJaR7h1-ygt_ylOy%7-9sJ|`rbFz`6HGjrh|I2{fi>sNSjo?3O{ zns}IpIRbB=Nf5el7Wm=3MK2G7-#e1K;k?u}*?dj&SqM zO??oE`IyvW#;6v+*;zND}lzhXo(+0FAHU7BB$ZCEBI;Mb9#F< z<-It5!cObB)qKS+-H{U)*_!qCo``){vGEgjS|j`GI?=|a;$GZrL+^_Hu=R_4)!1px z8ZX?#z3Ciy;vF`QpY}^RH)~77c@R#@IWCP0XF0sedXuuIn{gbpZt{2Rv64yo-(dq^yJF^Y47}^vo#v^q4OmFb3Be?A~;$rmU#Go+KXuar*nX8KN#z*E%{dY=Wd*5 zSI;lPrsY474eU910?(*vEccv}wibPXTUI*b7)Ps#56#*Bx>HbGUdwW_>a(7F7sBAk zRz`bIJ`~NN3GZ=Q;vwC1p8w6N&GNlF|C#^h;<>9emVn^*p1S``GPSU{+9rR$_{iTc z=%aP?-vpzZ-!Am+Lw5_v{{GAF7yDzT{nOnd?*sN_h1HuM`4G0?$HXv+3 S*ub-619&}1{DcNv*1*5(vRkhJ literal 0 HcmV?d00001 diff --git a/src/app/globals.css b/src/app/globals.css new file mode 100644 index 00000000..046fb63d --- /dev/null +++ b/src/app/globals.css @@ -0,0 +1,103 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +:root { + --foreground-rgb: 0, 0, 0; + --background-start-rgb: 214, 219, 220; + --background-end-rgb: 255, 255, 255; +} + +@media (prefers-color-scheme: dark) { + :root { + --foreground-rgb: 255, 255, 255; + --background-start-rgb: 0, 0, 0; + --background-end-rgb: 0, 0, 0; + } +} +*::-webkit-scrollbar { + display: none; +} + +a { + color: rgb(33, 118, 246); +} + +@layer utilities { + .text-balance { + text-wrap: balance; + } + .no-scrollbar::-webkit-scrollbar { + display: none; + } + + .no-scrollbar { + -ms-overflow-style: none; /* IE and Edge */ + scrollbar-width: none; /* Firefox */ + } +} + +@layer base { + :root { + --background: 0 0% 100%; + --foreground: 0 0% 3.9%; + --card: 0 0% 100%; + --card-foreground: 0 0% 3.9%; + --popover: 0 0% 100%; + --popover-foreground: 0 0% 3.9%; + --primary: 0 0% 9%; + --primary-foreground: 0 0% 98%; + --secondary: 0 0% 96.1%; + --secondary-foreground: 0 0% 9%; + --muted: 0 0% 96.1%; + --muted-foreground: 0 0% 45.1%; + --accent: 0 0% 96.1%; + --accent-foreground: 0 0% 9%; + --destructive: 0 84.2% 60.2%; + --destructive-foreground: 0 0% 98%; + --border: 0 0% 89.8%; + --input: 0 0% 89.8%; + --ring: 0 0% 3.9%; + --chart-1: 12 76% 61%; + --chart-2: 173 58% 39%; + --chart-3: 197 37% 24%; + --chart-4: 43 74% 66%; + --chart-5: 27 87% 67%; + --radius: 0.5rem; + } + .dark { + --background: 0 0% 3.9%; + --foreground: 0 0% 98%; + --card: 0 0% 3.9%; + --card-foreground: 0 0% 
98%; + --popover: 0 0% 3.9%; + --popover-foreground: 0 0% 98%; + --primary: 0 0% 98%; + --primary-foreground: 0 0% 9%; + --secondary: 0 0% 14.9%; + --secondary-foreground: 0 0% 98%; + --muted: 0 0% 14.9%; + --muted-foreground: 0 0% 63.9%; + --accent: 0 0% 14.9%; + --accent-foreground: 0 0% 98%; + --destructive: 0 62.8% 30.6%; + --destructive-foreground: 0 0% 98%; + --border: 0 0% 14.9%; + --input: 0 0% 14.9%; + --ring: 0 0% 83.1%; + --chart-1: 220 70% 50%; + --chart-2: 160 60% 45%; + --chart-3: 30 80% 55%; + --chart-4: 280 65% 60%; + --chart-5: 340 75% 55%; + } +} + +@layer base { + * { + @apply border-border; + } + body { + @apply bg-background text-foreground; + } +} diff --git a/src/app/layout.tsx b/src/app/layout.tsx new file mode 100644 index 00000000..178ba72e --- /dev/null +++ b/src/app/layout.tsx @@ -0,0 +1,22 @@ +import type { Metadata } from "next"; +import { Inter } from "next/font/google"; +import "./globals.css"; + +const inter = Inter({ subsets: ["latin"] }); + +export const metadata: Metadata = { + title: "Streaming UI chat", + description: "Streaming UI chat", +}; + +export default function RootLayout({ + children, +}: Readonly<{ + children: React.ReactNode; +}>) { + return ( + + {children} + + ); +} diff --git a/src/app/page.tsx b/src/app/page.tsx new file mode 100644 index 00000000..45a93484 --- /dev/null +++ b/src/app/page.tsx @@ -0,0 +1,78 @@ +"use client"; +import { WelcomeDialog } from "@/components/WelcomeDialog"; +import { useState } from "react"; +import { useRules } from "@/hooks/useRules"; +import { GeneratedRulesDialog } from "@/components/GeneratedRulesDialog"; +import { ContentComposerChatInterface } from "@/components/ContentComposer"; +import { useGraph } from "@/hooks/useGraph"; +import { SystemRulesDialog } from "@/components/SystemRulesDialog"; +import { useUser } from "@/hooks/useUser"; +import { AssistantsDropdown } from "@/components/AssistantsDropdown"; + +export default function Home() { + const [refreshAssistants, setRefreshAssistants] = useState< + () => Promise + >(() => () => Promise.resolve()); + const { userId } = useUser(); + const { + createAssistant, + sendMessage, + streamMessage, + assistantId, + setAssistantId, + isGetAssistantsLoading, + getAssistantsByUserId, + updateAssistantMetadata, + } = useGraph({ userId, refreshAssistants }); + const { + userRules, + isLoadingUserRules, + setSystemRules, + systemRules, + setSystemRulesAndSave, + isLoadingSystemRules, + getUserRules, + } = useRules({ assistantId, userId }); + + return ( +
+ Promise) => + setRefreshAssistants(() => callback) + } + /> + + + sendMessage(params, getUserRules)} + streamMessage={streamMessage} + userId={userId} + /> + +
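One wiring detail worth calling out from the Home component above: React state setters treat a function argument as a functional updater, so storing the refresh callback requires the extra arrow in `setRefreshAssistants(() => callback)`. A minimal sketch of the pattern, using illustrative names (`useRefreshRegistration`, `register`) rather than the exact code in this file:

    import { useState } from "react";

    // Keeps a child-provided refresh function in state. The wrapping arrow is
    // what prevents React from calling `callback` as a state updater.
    function useRefreshRegistration() {
      const [refresh, setRefresh] = useState<() => Promise<void>>(
        () => () => Promise.resolve()
      );
      const register = (callback: () => Promise<void>) =>
        setRefresh(() => callback);
      return { refresh, register };
    }

This mirrors how AssistantsDropdown registers its `fetchAssistants` via `onAssistantUpdate`, presumably so the page can trigger a re-fetch later (`refreshAssistants` is handed to `useGraph`, which is not shown in this hunk).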
+ ); +} diff --git a/src/components/AssistantsDropdown.tsx b/src/components/AssistantsDropdown.tsx new file mode 100644 index 00000000..f5d83fa5 --- /dev/null +++ b/src/components/AssistantsDropdown.tsx @@ -0,0 +1,155 @@ +import { type Assistant } from "@langchain/langgraph-sdk"; +import { useCallback, useEffect, useState } from "react"; +import { cn } from "@/lib/utils"; +import { Loader } from "lucide-react"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuLabel, + DropdownMenuRadioGroup, + DropdownMenuRadioItem, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "./ui/dropdown-menu"; +import { Button } from "./ui/button"; +import { NewAssistantDialog } from "./NewAssistantDialog"; + +export interface AssistantsDropdownProps { + selectedAssistantId: string | undefined; + isGetAssistantsLoading: boolean; + getAssistantsByUserId: (userId: string) => Promise; + setAssistantId: (assistantId: string) => void; + userId: string | undefined; + createAssistant: ( + graphId: string, + userId: string, + extra?: { + assistantName?: string; + assistantDescription?: string; + overrideExisting?: boolean; + } + ) => Promise; + onAssistantUpdate: (callback: () => Promise) => void; +} + +export function AssistantsDropdown(props: AssistantsDropdownProps) { + const [assistants, setAssistants] = useState([]); + const [selectedAssistant, setSelectedAssistant] = useState( + null + ); + const [isLoading, setIsLoading] = useState(true); + + const fetchAssistants = useCallback(async () => { + if (!props.userId || !props.selectedAssistantId) return; + setIsLoading(true); + let assistants = await props.getAssistantsByUserId(props.userId); + if (props.selectedAssistantId) { + const currentSelectedAssistant = assistants.find( + (a) => a.assistant_id === props.selectedAssistantId + ); + + if (currentSelectedAssistant) { + const otherAssistants = assistants.filter( + (a) => a.assistant_id !== props.selectedAssistantId + ); + assistants = [currentSelectedAssistant, ...otherAssistants]; + setSelectedAssistant(currentSelectedAssistant); + } + } + setAssistants(assistants); + setIsLoading(false); + }, [props.userId, props.selectedAssistantId]); + + useEffect(() => { + if (!props.userId || assistants.length > 0) { + setIsLoading(false); + } + void fetchAssistants(); + }, [props.userId, props.selectedAssistantId, assistants.length]); + + useEffect(() => { + props.onAssistantUpdate(fetchAssistants); + }, [props.onAssistantUpdate]); + + const handleChangeAssistant = (assistantId: string) => { + if (assistantId === props.selectedAssistantId) return; + + props.setAssistantId(assistantId); + // Force page reload + window.location.reload(); + }; + + const defaultButtonValue = isLoading ? ( +

+ Loading assistants + +

+ ) : ( + "Select assistant" + ); + + return ( +
+ + + + + + Assistants + + + {props.isGetAssistantsLoading ? ( +

+ Fetching assistants + +

+ ) : assistants.length ? ( + assistants.map((assistant, idx) => { + const assistantName = + (assistant.metadata?.assistantName as string | undefined) || + `My assistant ${idx + 1}`; + const assistantDescription = assistant.metadata + ?.assistantDescription as string | undefined; + return ( + +
+

{assistantName}

+ {assistantDescription && ( +

+ {assistantDescription} +

+ )} +
+
+ ); + }) + ) : ( +

+ No assistants found. Please create one. +

+ )} +
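For context on the fields read in the list above: the dropdown labels each entry from `assistant.metadata`, and `createAssistant` accepts matching `assistantName` / `assistantDescription` extras. A rough, inferred sketch of the metadata shape this UI relies on (the SDK types `metadata` as an open record, and the write side lives in `useGraph.tsx`, which is not part of this hunk):

    // Inferred from the reads above; keys and types are assumptions, not the
    // canonical definition.
    type AssistantUIMetadata = {
      assistantName?: string;        // falls back to `My assistant ${idx + 1}`
      assistantDescription?: string; // shown under the name when present
      // likely also some user identifier, since assistants are fetched per userId
    };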
+ + + + +
+
+
+ ); +} diff --git a/src/components/ContentComposer.tsx b/src/components/ContentComposer.tsx new file mode 100644 index 00000000..e5b2b3c1 --- /dev/null +++ b/src/components/ContentComposer.tsx @@ -0,0 +1,212 @@ +"use client"; + +import React, { useState } from "react"; +import { + AppendMessage, + AssistantRuntimeProvider, + useExternalStoreRuntime, +} from "@assistant-ui/react"; +import { v4 as uuidv4 } from "uuid"; +import { MyThread } from "./Primitives"; +import { processStream } from "@/lib/process_event"; +import { useExternalMessageConverter } from "@assistant-ui/react"; +import { AIMessage, BaseMessage, HumanMessage } from "@langchain/core/messages"; +import { + convertLangchainMessages, + convertToOpenAIFormat, +} from "@/lib/convert_messages"; +import { GraphInput } from "@/hooks/useGraph"; +import { Toaster } from "./ui/toaster"; +import { useToast } from "@/hooks/use-toast"; +import { Loader } from "lucide-react"; +import { type Assistant } from "@langchain/langgraph-sdk"; + +export interface ContentComposerChatInterfaceProps { + systemRules: string | undefined; + streamMessage: (params: GraphInput) => Promise< + AsyncGenerator< + { + event: string; + data: any; + }, + any, + unknown + > + >; + sendMessage: (params: GraphInput) => Promise>; + createAssistant: ( + graphId: string, + userId: string, + extra?: { + assistantName?: string; + assistantDescription?: string; + overrideExisting?: boolean; + } + ) => Promise; + userId: string | undefined; +} + +export function ContentComposerChatInterface( + props: ContentComposerChatInterfaceProps +): React.ReactElement { + const { systemRules, streamMessage, sendMessage } = props; + // Only messages which are rendered in the UI. This mainly excludes revised messages. + const [renderedMessages, setRenderedMessages] = useState([]); + // Messages which contain revisions are not rendered. + const [allMessages, setAllMessages] = useState([]); + const [isRunning, setIsRunning] = useState(false); + // Use this state field to determine whether or not to generate insights. + const [contentGenerated, setContentGenerated] = useState(false); + const { toast } = useToast(); + + async function onNew(message: AppendMessage): Promise { + if (message.content[0]?.type !== "text") { + throw new Error("Only text messages are supported"); + } + setIsRunning(true); + + try { + const humanMessage = new HumanMessage({ + content: message.content[0].text, + id: uuidv4(), + }); + const currentConversation = [...renderedMessages, humanMessage]; + + setRenderedMessages(currentConversation); + setAllMessages((prevMessages) => [...prevMessages, humanMessage]); + + const response = await streamMessage({ + messages: currentConversation.map(convertToOpenAIFormat), + hasAcceptedText: false, + contentGenerated, + systemRules, + }); + + const fullMessage = await processStream(response, { + setRenderedMessages, + setContentGenerated, + }); + setAllMessages((prevMessages) => [...prevMessages, fullMessage]); + } catch (error) { + console.error("Error running message:", error); + } finally { + setIsRunning(false); + } + } + + /** + * Handle updating the state values for `renderedMessages` as well as + * `allMessages` when a message is edited. Then, trigger a new request + * passing `hasAcceptedText` as true to the API so that rules are generated. + * Since we're only making the request to generate rules, we don't need to + * update the rendered messages with the response. 
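+ *
+ * Illustrative example (the code below treats `message.parentId` as the
+ * numeric index of the message preceding the edited one):
+ *
+ * @example
+ * // Before editing the assistant draft at index 1:
+ * //   renderedMessages = [human0, ai1]
+ * //   allMessages      = [human0, ai1]
+ * // After the edit:
+ * //   renderedMessages = [human0, editedAi1]                    (replaced in place)
+ * //   allMessages      = [human0, ai1, "REVISED MESSAGE:\n..."] (revision appended after the original)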
+ */ + const onEdit = async (message: AppendMessage): Promise => { + if (message.content[0]?.type !== "text") { + throw new Error("Only text messages are supported"); + } + // Create a new assistant message with the revision + const newMessage = new AIMessage({ + content: message.content[0].text, + id: uuidv4(), + }); + const revisedMessage = new HumanMessage({ + id: newMessage.id, + content: `REVISED MESSAGE:\n${newMessage.content}`, + }); + + const indexOfMessage = Number(message.parentId) + 1; + + // Updates the rendered message with the revised message + setRenderedMessages((prevMessages) => [ + ...prevMessages.slice(0, indexOfMessage), + newMessage, + ...prevMessages.slice(indexOfMessage + 1), + ]); + + const currentConversation: BaseMessage[] = [ + ...allMessages.slice(0, indexOfMessage + 1), + revisedMessage, + ...allMessages.slice(indexOfMessage + 1), + ]; + // Insert the revised message directly after the original in the conversation history + setAllMessages((prevMessages) => [ + ...prevMessages.slice(0, indexOfMessage + 1), + revisedMessage, + ...prevMessages.slice(indexOfMessage + 1), + ]); + + // Do not generate insights if content hasn't been generated + if (!contentGenerated) return; + + try { + toast({ + description: ( +

+ Generating rules + +

+ ), + duration: 10000, + }); + await sendMessage({ + messages: currentConversation.map(convertToOpenAIFormat), + hasAcceptedText: true, + contentGenerated: true, + systemRules, + }); + toast({ + title: "Successfully generated rules ✅", + duration: 2500, + }); + } catch (error) { + console.error("Error editing message:", error); + } + }; + + const threadMessages = useExternalMessageConverter({ + callback: convertLangchainMessages, + messages: renderedMessages, + isRunning, + }); + + const runtime = useExternalStoreRuntime({ + messages: threadMessages, + isRunning, + onNew, + onEdit, + }); + + return ( +
+ + { + // Do not generate insights if content hasn't been generated + if (!contentGenerated) return; + toast({ + description: ( +

+ Generating rules + +

+ ), + duration: 10000, + }); + await sendMessage({ + messages: allMessages.map(convertToOpenAIFormat), + hasAcceptedText: true, + contentGenerated: true, + systemRules, + }); + toast({ + title: "Successfully generated rules ✅", + duration: 2500, + }); + }} + /> +
+ +
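For reference, the `GraphInput` passed to `sendMessage` / `streamMessage` throughout this component is defined in `src/hooks/useGraph.tsx`, which is not part of this hunk. Judging only from the call sites above, it is roughly the following; the field types are inferred, not copied from the source:

    // Rough shape inferred from the onNew / onEdit / onCopy calls above.
    interface GraphInput {
      messages: Record<string, any>[]; // OpenAI-format messages via convertToOpenAIFormat
      hasAcceptedText: boolean;        // true when a draft was revised or copied, which triggers rule generation
      contentGenerated: boolean;       // whether the assistant has already produced writing content
      systemRules: string | undefined; // user-editable system rules threaded into the run
    }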
+ ); +} diff --git a/src/components/GeneratedRulesDialog.tsx b/src/components/GeneratedRulesDialog.tsx new file mode 100644 index 00000000..dbf181fe --- /dev/null +++ b/src/components/GeneratedRulesDialog.tsx @@ -0,0 +1,110 @@ +import { useState } from "react"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, + DialogDescription, + DialogTrigger, +} from "./ui/dialog"; +import { Button } from "./ui/button"; +import { Loader } from "lucide-react"; +import { UserRules } from "@/types"; + +export interface GeneratedRulesProps { + isLoadingUserRules: boolean; + userRules: UserRules | undefined; +} + +export function GeneratedRulesDialog(props: GeneratedRulesProps) { + const { userRules, isLoadingUserRules } = props; + const [open, setOpen] = useState(false); + + return ( + + +
setOpen(true)} + className="fixed top-4 right-36 bg-black hover:bg-gray-800 text-white px-4 py-2 rounded-md shadow-sm transition-colors duration-200 cursor-pointer flex items-center space-x-2" + > +

Generated Rules

+
+
+ + + + Generated Rules + + + {isLoadingUserRules + ? "Loading rules..." + : userRules?.contentRules || userRules?.styleRules + ? "Current rules generated by the assistant for content generation." + : "No rules have been generated yet. Follow the steps below to generate rules."} + + +
+ {isLoadingUserRules ? ( +
+ +
+ ) : userRules?.contentRules || userRules?.styleRules ? ( + <> + {userRules.styleRules && ( +
+

+ Style Rules: +

+
    + {userRules.styleRules.map((rule, index) => ( +
  + {rule}
  + ))}
+
+ )} + {userRules.contentRules && ( +
+

+ Content Rules: +

+
    + {userRules.contentRules.map((rule, index) => ( +
  + {rule}
  + ))}
+
+ )} + + ) : ( +
+

To generate rules:

+
    +
  1. Ask the assistant to generate some writing content
  2. Revise & save, or copy the generated content
  3. This will trigger rule generation
+

+ Once rules are generated, they will appear here.{" "} + + (You may need to refresh the page first) + +
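The `UserRules` value rendered by this dialog is imported from `@/types` (src/types.ts, not part of this hunk). Based only on how it is read above, it appears to be two optional lists of plain-text rules, roughly:

    // Sketch inferred from the reads above (userRules?.styleRules / contentRules,
    // each mapped and rendered as text); see src/types.ts for the real definition.
    type UserRules = {
      styleRules?: string[];   // rendered under "Style Rules:"
      contentRules?: string[]; // rendered under "Content Rules:"
    };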

+
+ )} +
+
+ +
+
+
+ ); +} diff --git a/src/components/NewAssistantDialog.tsx b/src/components/NewAssistantDialog.tsx new file mode 100644 index 00000000..26c32043 --- /dev/null +++ b/src/components/NewAssistantDialog.tsx @@ -0,0 +1,116 @@ +import { useState } from "react"; +import { Button } from "./ui/button"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "./ui/dialog"; +import { PlusCircleIcon, Loader } from "lucide-react"; +import { Input } from "./ui/input"; +import { Textarea } from "./ui/textarea"; +import { type Assistant } from "@langchain/langgraph-sdk"; + +export interface NewAssistantDialogProps { + createAssistant: ( + graphId: string, + userId: string, + extra?: { + assistantName?: string; + assistantDescription?: string; + overrideExisting?: boolean; + } + ) => Promise; + userId: string | undefined; +} + +export function NewAssistantDialog(props: NewAssistantDialogProps) { + const [open, setOpen] = useState(false); + const [isLoading, setIsLoading] = useState(false); + const [name, setName] = useState(""); + const [description, setDescription] = useState(""); + + const handleCreateNewAssistant = async ( + e: React.FormEvent + ) => { + e.preventDefault(); + if (!props.userId) { + throw new Error("User ID is required"); + } + setIsLoading(true); + await props.createAssistant( + process.env.NEXT_PUBLIC_LANGGRAPH_GRAPH_ID ?? "", + props.userId, + { + assistantName: name, + assistantDescription: description, + overrideExisting: true, + } + ); + // Force page reload + window.location.reload(); + }; + + return ( + + +

setOpen(true)} + > + Create assistant + +

+
+ + + + Create a new assistant + + +
+
+ + setName(e.target.value)} + placeholder="Tweet writer" + className="w-full" + /> +
+
+ +