From 8e26d22dfb0d924ab8391ff2782531b2be4650d3 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Wed, 7 Feb 2024 00:17:26 -0800 Subject: [PATCH 01/25] Include stats in readProject (#408) and speed up int tests --- .../python-integration-tests/action.yml | 2 +- python/Makefile | 4 +- python/langsmith/client.py | 23 ++++ python/poetry.lock | 101 +++++++++++++++--- python/pyproject.toml | 1 + python/tests/integration_tests/test_client.py | 25 +++-- python/tests/integration_tests/test_runs.py | 16 +-- 7 files changed, 134 insertions(+), 38 deletions(-) diff --git a/.github/actions/python-integration-tests/action.yml b/.github/actions/python-integration-tests/action.yml index bb6a4decd..4fb91cc06 100644 --- a/.github/actions/python-integration-tests/action.yml +++ b/.github/actions/python-integration-tests/action.yml @@ -37,6 +37,6 @@ runs: LANGCHAIN_TRACING_V2: "true" LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key }} OPENAI_API_KEY: ${{ inputs.openai-api-key }} - run: make integration_tests + run: make integration_tests_fast shell: bash working-directory: python diff --git a/python/Makefile b/python/Makefile index 75312826f..d6eaebfd4 100644 --- a/python/Makefile +++ b/python/Makefile @@ -7,10 +7,10 @@ tests_watch: poetry run ptw --now . -- -vv -x tests/unit_tests integration_tests: - poetry run pytest tests/integration_tests + poetry run pytest -v --durations=10 --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests integration_tests_fast: - poetry run pytest -n auto tests/integration_tests + poetry run pytest -n auto --durations=10 -v --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests lint: poetry run ruff . diff --git a/python/langsmith/client.py b/python/langsmith/client.py index f535051f8..6bde27f0d 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -1655,6 +1655,29 @@ def read_project( **response.json(), _host_url=self._host_url ) + def has_project( + self, project_name: str, *, project_id: Optional[str] = None + ) -> bool: + """Check if a project exists. + + Parameters + ---------- + project_name : str + The name of the project to check for. + project_id : str or None, default=None + The ID of the project to check for. + + Returns + ------- + bool + Whether the project exists. + """ + try: + self.read_project(project_name=project_name) + except ls_utils.LangSmithNotFoundError: + return False + return True + def get_test_results( self, *, diff --git a/python/poetry.lock b/python/poetry.lock index 153311fe7..fd5b73d79 100644 --- a/python/poetry.lock +++ b/python/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "aiohttp" @@ -337,6 +337,73 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.4.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = 
"coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "dataclasses-json" version = "0.6.1" @@ -645,7 +712,6 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] @@ -1095,6 +1161,24 @@ pytest = ">=7.0.0" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "pytest-subtests" version = "0.11.0" @@ -1171,7 +1255,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1179,16 +1262,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1205,7 +1280,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1213,7 +1287,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1639,4 +1712,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "f452b10f4c30cc0e4d78d5837ae0af3d044348dc76a9b0b2b52df66b0b8bb0a3" +content-hash = "54bcb7f0ce64b9375ca8b3a70932f3e4ae020e2e0e021e7d6fede195186de4a2" diff --git a/python/pyproject.toml b/python/pyproject.toml index 43a638cdf..326039483 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -47,6 +47,7 @@ freezegun = "^1.2.2" pytest-subtests = "^0.11.0" pytest-watcher = "^0.3.4" pytest-xdist = "^3.5.0" +pytest-cov = "^4.1.0" [tool.poetry.group.lint.dependencies] openai = "^1.10" diff --git a/python/tests/integration_tests/test_client.py b/python/tests/integration_tests/test_client.py index d6e4ecff5..369861952 100644 --- a/python/tests/integration_tests/test_client.py +++ b/python/tests/integration_tests/test_client.py @@ -29,14 +29,9 @@ def 
langchain_client(monkeypatch: pytest.MonkeyPatch) -> Client: def test_projects(langchain_client: Client, monkeypatch: pytest.MonkeyPatch) -> None: """Test projects.""" - project_names = set([project.name for project in langchain_client.list_projects()]) new_project = "__Test Project" - if new_project in project_names: + if langchain_client.has_project(new_project): langchain_client.delete_project(project_name=new_project) - project_names = set( - [project.name for project in langchain_client.list_projects()] - ) - assert new_project not in project_names monkeypatch.setenv("LANGCHAIN_ENDPOINT", "https://api.smith.langchain.com") langchain_client.create_project( @@ -45,17 +40,18 @@ def test_projects(langchain_client: Client, monkeypatch: pytest.MonkeyPatch) -> ) project = langchain_client.read_project(project_name=new_project) assert project.name == new_project - project_names = set([sess.name for sess in langchain_client.list_projects()]) - assert new_project in project_names runs = list(langchain_client.list_runs(project_name=new_project)) project_id_runs = list(langchain_client.list_runs(project_id=project.id)) - assert len(runs) == len(project_id_runs) == 0 # TODO: Add create_run method + assert len(runs) == len(project_id_runs) == 0 langchain_client.delete_project(project_name=new_project) with pytest.raises(LangSmithError): langchain_client.read_project(project_name=new_project) assert new_project not in set( - [sess.name for sess in langchain_client.list_projects()] + [ + sess.name + for sess in langchain_client.list_projects(name_contains=new_project) + ] ) with pytest.raises(LangSmithError): langchain_client.delete_project(project_name=new_project) @@ -392,15 +388,18 @@ def test_batch_ingest_runs(langchain_client: Client) -> None: langchain_client.batch_ingest_runs(create=runs_to_create, update=runs_to_update) runs = [] wait = 2 - for _ in range(5): + for _ in range(15): try: - runs = list(langchain_client.list_runs(project_name=_session)) + runs = list( + langchain_client.list_runs( + project_name=_session, run_ids=[str(trace_id), str(run_id_2)] + ) + ) if len(runs) == 2: break raise LangSmithError("Runs not created yet") except LangSmithError: time.sleep(wait) - wait += 4 else: raise ValueError("Runs not created in time") assert len(runs) == 2 diff --git a/python/tests/integration_tests/test_runs.py b/python/tests/integration_tests/test_runs.py index 009b59160..20b58a251 100644 --- a/python/tests/integration_tests/test_runs.py +++ b/python/tests/integration_tests/test_runs.py @@ -52,7 +52,7 @@ def test_nested_runs( langchain_client: Client, ): project_name = "__My Tracer Project - test_nested_runs" - if project_name in [project.name for project in langchain_client.list_projects()]: + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) executor = ThreadPoolExecutor(max_workers=1) @@ -100,7 +100,7 @@ def my_chain_run(text: str): async def test_nested_async_runs(langchain_client: Client): """Test nested runs with a mix of async and sync functions.""" project_name = "__My Tracer Project - test_nested_async_runs" - if project_name in [project.name for project in langchain_client.list_projects()]: + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) executor = ThreadPoolExecutor(max_workers=1) @@ -149,7 +149,7 @@ async def my_chain_run(text: str): async def test_nested_async_runs_with_threadpool(langchain_client: Client): """Test nested runs with a mix of async and sync 
functions.""" project_name = "__My Tracer Project - test_nested_async_runs_with_threadpol" - if project_name in [project.name for project in langchain_client.list_projects()]: + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) @traceable(run_type="llm") @@ -219,7 +219,7 @@ async def my_chain_run(text: str, run_tree: RunTree): async def test_context_manager(langchain_client: Client) -> None: project_name = "__My Tracer Project - test_context_manager" - if project_name in [project.name for project in langchain_client.list_projects()]: + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) @traceable(run_type="llm") @@ -246,7 +246,7 @@ async def my_llm(prompt: str) -> str: async def test_sync_generator(langchain_client: Client): project_name = "__My Tracer Project - test_sync_generator" - if project_name in [project.name for project in langchain_client.list_projects()]: + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) @traceable(run_type="chain") @@ -268,7 +268,7 @@ def my_generator(num: int) -> Generator[str, None, None]: async def test_sync_generator_reduce_fn(langchain_client: Client): project_name = "__My Tracer Project - test_sync_generator_reduce_fn" - if project_name in [project.name for project in langchain_client.list_projects()]: + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) def reduce_fn(outputs: list) -> dict: @@ -295,7 +295,7 @@ def my_generator(num: int) -> Generator[str, None, None]: async def test_async_generator(langchain_client: Client): project_name = "__My Tracer Project - test_async_generator" - if project_name in [project.name for project in langchain_client.list_projects()]: + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) @traceable(run_type="chain") @@ -335,7 +335,7 @@ async def my_async_generator(num: int) -> AsyncGenerator[str, None]: async def test_async_generator_reduce_fn(langchain_client: Client): project_name = "__My Tracer Project - test_async_generator_reduce_fn" - if project_name in [project.name for project in langchain_client.list_projects()]: + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) def reduce_fn(outputs: list) -> dict: From cf733d4999961f5c37aab5edb66b8237599117fd Mon Sep 17 00:00:00 2001 From: Jacob Lee Date: Wed, 7 Feb 2024 01:06:59 -0800 Subject: [PATCH 02/25] Adds auto batch tracer support for JS (#406) CC @nfcampos @hinthornw Unit tests done. 
Supersedes #383 --- js/package.json | 1 + js/src/client.ts | 310 +++++++++++++++++++++++--- js/src/schemas.ts | 51 +++-- js/src/tests/batch_client.int.test.ts | 164 ++++++++++++++ js/src/tests/batch_client.test.ts | 289 ++++++++++++++++++++++++ js/src/tests/client.int.test.ts | 28 ++- 6 files changed, 788 insertions(+), 55 deletions(-) create mode 100644 js/src/tests/batch_client.int.test.ts create mode 100644 js/src/tests/batch_client.test.ts diff --git a/js/package.json b/js/package.json index a9506707d..a98eec527 100644 --- a/js/package.json +++ b/js/package.json @@ -2,6 +2,7 @@ "name": "langsmith", "version": "0.0.66", "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.", + "packageManager": "yarn@1.22.19", "files": [ "dist/", "client.cjs", diff --git a/js/src/client.ts b/js/src/client.ts index 181a5cd4d..07ddbd108 100644 --- a/js/src/client.ts +++ b/js/src/client.ts @@ -40,6 +40,8 @@ interface ClientConfig { webUrl?: string; hideInputs?: boolean; hideOutputs?: boolean; + autoBatchTracing?: boolean; + pendingAutoBatchedRunLimit?: number; } interface ListRunsParams { @@ -56,6 +58,7 @@ interface ListRunsParams { query?: string; filter?: string; } + interface UploadCSVParams { csvFile: Blob; fileName: string; @@ -105,9 +108,15 @@ interface CreateRunParams { parent_run_id?: string; project_name?: string; revision_id?: string; + trace_id?: string; + dotted_order?: string; +} + +interface UpdateRunParams extends RunUpdate { + id?: string; } -interface projectOptions { +interface ProjectOptions { projectName?: string; projectId?: string; } @@ -121,6 +130,51 @@ export type CreateExampleOptions = { exampleId?: string; }; +type AutoBatchQueueItem = { + action: "create" | "update"; + item: RunCreate | RunUpdate; +}; + +async function mergeRuntimeEnvIntoRunCreates(runs: RunCreate[]) { + const runtimeEnv = await getRuntimeEnvironment(); + const envVars = getLangChainEnvVarsMetadata(); + return runs.map((run) => { + const extra = run.extra ?? {}; + const metadata = extra.metadata; + run.extra = { + ...extra, + runtime: { + ...runtimeEnv, + ...extra?.runtime, + }, + metadata: { + ...envVars, + ...(envVars.revision_id || run.revision_id + ? { revision_id: run.revision_id ?? envVars.revision_id } + : {}), + ...metadata, + }, + }; + return run; + }); +} + +const getTracingSamplingRate = () => { + const samplingRateStr = getEnvironmentVariable( + "LANGCHAIN_TRACING_SAMPLING_RATE" + ); + if (samplingRateStr === undefined) { + return undefined; + } + const samplingRate = parseFloat(samplingRateStr); + if (samplingRate < 0 || samplingRate > 1) { + throw new Error( + `LANGCHAIN_TRACING_SAMPLING_RATE must be between 0 and 1 if set. Got: ${samplingRate}` + ); + } + return samplingRate; +}; + // utility functions const isLocalhost = (url: string): boolean => { const strippedUrl = url.replace("http://", "").replace("https://", ""); @@ -182,9 +236,26 @@ export class Client { private hideOutputs?: boolean; + private tracingSampleRate?: number; + + private sampledPostUuids = new Set(); + + private autoBatchTracing = true; + + private pendingAutoBatchedRuns: AutoBatchQueueItem[] = []; + + private pendingAutoBatchedRunLimit = 100; + + private autoBatchTimeout: ReturnType | undefined; + + private autoBatchInitialDelayMs = 250; + + private autoBatchAggregationDelayMs = 50; + constructor(config: ClientConfig = {}) { const defaultConfig = Client.getDefaultClientConfig(); + this.tracingSampleRate = getTracingSamplingRate(); this.apiUrl = trimQuotes(config.apiUrl ?? 
defaultConfig.apiUrl) ?? ""; this.apiKey = trimQuotes(config.apiKey ?? defaultConfig.apiKey); this.webUrl = trimQuotes(config.webUrl ?? defaultConfig.webUrl); @@ -193,6 +264,9 @@ export class Client { this.caller = new AsyncCaller(config.callerOptions ?? {}); this.hideInputs = config.hideInputs ?? defaultConfig.hideInputs; this.hideOutputs = config.hideOutputs ?? defaultConfig.hideOutputs; + this.autoBatchTracing = config.autoBatchTracing ?? this.autoBatchTracing; + this.pendingAutoBatchedRunLimit = + config.pendingAutoBatchedRunLimit ?? this.pendingAutoBatchedRunLimit; } public static getDefaultClientConfig(): { @@ -273,6 +347,21 @@ export class Client { return outputs; } + private prepareRunCreateOrUpdateInputs(run: RunUpdate): RunUpdate; + private prepareRunCreateOrUpdateInputs(run: RunCreate): RunCreate; + private prepareRunCreateOrUpdateInputs( + run: RunCreate | RunUpdate + ): RunCreate | RunUpdate { + const runParams = { ...run }; + if (runParams.inputs !== undefined) { + runParams.inputs = this.processInputs(runParams.inputs); + } + if (runParams.outputs !== undefined) { + runParams.outputs = this.processOutputs(runParams.outputs); + } + return runParams; + } + private async _getResponse( path: string, queryParams?: URLSearchParams @@ -366,47 +455,201 @@ export class Client { } } + private _filterForSampling( + runs: CreateRunParams[] | UpdateRunParams[], + patch = false + ) { + if (this.tracingSampleRate === undefined) { + return runs; + } + + if (patch) { + const sampled = []; + for (const run of runs) { + if (this.sampledPostUuids.has(run.id)) { + sampled.push(run); + this.sampledPostUuids.delete(run.id); + } + } + return sampled; + } else { + const sampled = []; + for (const run of runs) { + if (Math.random() < this.tracingSampleRate) { + sampled.push(run); + this.sampledPostUuids.add(run.id); + } + } + return sampled; + } + } + + private async triggerAutoBatchSend(runs?: AutoBatchQueueItem[]) { + let batch = runs; + if (batch === undefined) { + batch = this.pendingAutoBatchedRuns.slice( + 0, + this.pendingAutoBatchedRunLimit + ); + this.pendingAutoBatchedRuns = this.pendingAutoBatchedRuns.slice( + this.pendingAutoBatchedRunLimit + ); + } + await this.batchIngestRuns({ + runCreates: batch + .filter((item) => item.action === "create") + .map((item) => item.item) as RunCreate[], + runUpdates: batch + .filter((item) => item.action === "update") + .map((item) => item.item) as RunUpdate[], + }); + } + + private appendRunCreateToAutoBatchQueue(item: AutoBatchQueueItem) { + const oldTimeout = this.autoBatchTimeout; + clearTimeout(this.autoBatchTimeout); + this.autoBatchTimeout = undefined; + this.pendingAutoBatchedRuns.push(item); + while ( + this.pendingAutoBatchedRuns.length >= this.pendingAutoBatchedRunLimit + ) { + const batch = this.pendingAutoBatchedRuns.slice( + 0, + this.pendingAutoBatchedRunLimit + ); + this.pendingAutoBatchedRuns = this.pendingAutoBatchedRuns.slice( + this.pendingAutoBatchedRunLimit + ); + void this.triggerAutoBatchSend(batch); + } + if (this.pendingAutoBatchedRuns.length > 0) { + if (!oldTimeout) { + this.autoBatchTimeout = setTimeout(() => { + this.autoBatchTimeout = undefined; + void this.triggerAutoBatchSend(); + }, this.autoBatchInitialDelayMs); + } else { + this.autoBatchTimeout = setTimeout(() => { + this.autoBatchTimeout = undefined; + void this.triggerAutoBatchSend(); + }, this.autoBatchAggregationDelayMs); + } + } + } + public async createRun(run: CreateRunParams): Promise { + if (!this._filterForSampling([run]).length) { + return; + } const 
headers = { ...this.headers, "Content-Type": "application/json" }; - const extra = run.extra ?? {}; - const metadata = extra.metadata; - const runtimeEnv = await getRuntimeEnvironment(); - const envVars = getLangChainEnvVarsMetadata(); const session_name = run.project_name; delete run.project_name; - const runCreate: RunCreate = { + + const runCreate: RunCreate = this.prepareRunCreateOrUpdateInputs({ session_name, ...run, - extra: { - ...run.extra, - runtime: { - ...runtimeEnv, - ...extra.runtime, - }, - metadata: { - ...envVars, - ...(envVars.revision_id || run.revision_id - ? { revision_id: run.revision_id ?? envVars.revision_id } - : {}), - ...metadata, - }, - }, - }; - runCreate.inputs = this.processInputs(runCreate.inputs); - if (runCreate.outputs) { - runCreate.outputs = this.processOutputs(runCreate.outputs); + start_time: run.start_time ?? Date.now(), + }); + if ( + this.autoBatchTracing && + runCreate.trace_id !== undefined && + runCreate.dotted_order !== undefined + ) { + this.appendRunCreateToAutoBatchQueue({ + action: "create", + item: runCreate, + }); + return; } - runCreate.start_time = run.start_time ?? Date.now(); + const mergedRunCreateParams = await mergeRuntimeEnvIntoRunCreates([ + runCreate, + ]); const response = await this.caller.call(fetch, `${this.apiUrl}/runs`, { method: "POST", headers, - body: JSON.stringify(runCreate), + body: JSON.stringify(mergedRunCreateParams[0]), signal: AbortSignal.timeout(this.timeout_ms), }); await raiseForStatus(response, "create run"); } + /** + * Batch ingest/upsert multiple runs in the Langsmith system. + * @param runs + */ + public async batchIngestRuns({ + runCreates, + runUpdates, + }: { + runCreates?: RunCreate[]; + runUpdates?: RunUpdate[]; + }) { + if (runCreates === undefined && runUpdates === undefined) { + return; + } + let preparedCreateParams = + runCreates?.map((create) => + this.prepareRunCreateOrUpdateInputs(create) + ) ?? []; + let preparedUpdateParams = + runUpdates?.map((update) => + this.prepareRunCreateOrUpdateInputs(update) + ) ?? 
[]; + + if (preparedCreateParams.length > 0 && preparedUpdateParams.length > 0) { + const createById = preparedCreateParams.reduce( + (params: Record, run) => { + if (!run.id) { + return params; + } + params[run.id] = run; + return params; + }, + {} + ); + const standaloneUpdates = []; + for (const updateParam of preparedUpdateParams) { + if (updateParam.id !== undefined && createById[updateParam.id]) { + createById[updateParam.id] = { + ...createById[updateParam.id], + ...updateParam, + }; + } else { + standaloneUpdates.push(updateParam); + } + } + preparedCreateParams = Object.values(createById); + preparedUpdateParams = standaloneUpdates; + } + const body = { + post: this._filterForSampling(preparedCreateParams), + patch: this._filterForSampling(preparedUpdateParams, true), + }; + if (!body.post.length && !body.patch.length) { + return; + } + preparedCreateParams = await mergeRuntimeEnvIntoRunCreates( + preparedCreateParams + ); + const headers = { + ...this.headers, + "Content-Type": "application/json", + Accept: "application/json", + }; + const response = await this.caller.call( + fetch, + `${this.apiUrl}/runs/batch`, + { + method: "POST", + headers, + body: JSON.stringify(body), + signal: AbortSignal.timeout(this.timeout_ms), + } + ); + await raiseForStatus(response, "batch create run"); + } + public async updateRun(runId: string, run: RunUpdate): Promise { assertUuid(runId); if (run.inputs) { @@ -416,6 +659,19 @@ export class Client { if (run.outputs) { run.outputs = this.processOutputs(run.outputs); } + // TODO: Untangle types + const data: UpdateRunParams = { ...run, id: runId }; + if (!this._filterForSampling([data], true).length) { + return; + } + if ( + this.autoBatchTracing && + data.trace_id !== undefined && + data.dotted_order !== undefined + ) { + this.appendRunCreateToAutoBatchQueue({ action: "update", item: data }); + return; + } const headers = { ...this.headers, "Content-Type": "application/json" }; const response = await this.caller.call( fetch, @@ -449,7 +705,7 @@ export class Client { }: { runId?: string; run?: Run; - projectOpts?: projectOptions; + projectOpts?: ProjectOptions; }): Promise { if (run !== undefined) { let sessionId: string; diff --git a/js/src/schemas.ts b/js/src/schemas.ts index 78c33aed6..84bf0cc42 100644 --- a/js/src/schemas.ts +++ b/js/src/schemas.ts @@ -105,6 +105,23 @@ export interface BaseRun { /** Tags for further categorizing or annotating the run. */ tags?: string[]; + + /** Unique ID assigned to every run within this nested trace. **/ + trace_id?: string; + + /** + * The dotted order for the run. + * + * This is a string composed of {time}{run-uuid}.* so that a trace can be + * sorted in the order it was executed. + * + * Example: + * - Parent: 20230914T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8 + * - Children: + * - 20230914T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8.20230914T223155649Z809ed3a2-0172-4f4d-8a02-a64e9b7a0f8a + * - 20230915T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8.20230914T223155650Zc8d9f4c5-6c5a-4b2d-9b1c-3d9d7a7c5c7c + */ + dotted_order?: string; } /** @@ -150,30 +167,16 @@ export interface Run extends BaseRun { /** IDs of parent runs, if multiple exist. */ parent_run_ids?: string[]; - /**Unique ID assigned to every run within this nested trace.**/ - trace_id?: string; - - /** - * The dotted order for the run. - * - * This is a string composed of {time}{run-uuid}.* so that a trace can be - * sorted in the order it was executed. 
- * - * Example: - * - Parent: 20230914T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8 - * - Children: - * - 20230914T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8.20230914T223155649Z809ed3a2-0172-4f4d-8a02-a64e9b7a0f8a - * - 20230915T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8.20230914T223155650Zc8d9f4c5-6c5a-4b2d-9b1c-3d9d7a7c5c7c - */ - dotted_order?: string; } export interface RunCreate extends BaseRun { + revision_id?: string; child_runs?: this[]; session_name?: string; } export interface RunUpdate { + id?: string; end_time?: number; extra?: KVMap; error?: string; @@ -183,6 +186,22 @@ export interface RunUpdate { reference_example_id?: string; events?: KVMap[]; session_id?: string; + /** Unique ID assigned to every run within this nested trace. **/ + trace_id?: string; + + /** + * The dotted order for the run. + * + * This is a string composed of {time}{run-uuid}.* so that a trace can be + * sorted in the order it was executed. + * + * Example: + * - Parent: 20230914T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8 + * - Children: + * - 20230914T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8.20230914T223155649Z809ed3a2-0172-4f4d-8a02-a64e9b7a0f8a + * - 20230915T223155647Z1b64098b-4ab7-43f6-afee-992304f198d8.20230914T223155650Zc8d9f4c5-6c5a-4b2d-9b1c-3d9d7a7c5c7c + */ + dotted_order?: string; } export interface ExampleCreate extends BaseExample { diff --git a/js/src/tests/batch_client.int.test.ts b/js/src/tests/batch_client.int.test.ts new file mode 100644 index 000000000..bab6aa183 --- /dev/null +++ b/js/src/tests/batch_client.int.test.ts @@ -0,0 +1,164 @@ +import { Client } from "../client.js"; +import { convertToDottedOrderFormat } from "../run_trees.js"; +import { v4 as uuidv4 } from "uuid"; + +async function deleteProject(langchainClient: Client, projectName: string) { + try { + await langchainClient.readProject({ projectName }); + await langchainClient.deleteProject({ projectName }); + } catch (e) { + // Pass + } +} + +async function waitUntil( + condition: () => Promise, + timeout: number, + interval: number +): Promise { + const start = Date.now(); + while (Date.now() - start < timeout) { + if (await condition()) { + return; + } + await new Promise((resolve) => setTimeout(resolve, interval)); + } + throw new Error("Timeout"); +} + +async function waitUntilRunFound( + client: Client, + runId: string, + checkOutputs = false +) { + return waitUntil( + async () => { + try { + const run = await client.readRun(runId); + if (checkOutputs) { + return ( + run.outputs !== null && + run.outputs !== undefined && + Object.keys(run.outputs).length !== 0 + ); + } + return true; + } catch (e) { + return false; + } + }, + 30_000, + 1_000 + ); +} + +test("Test persist update run", async () => { + const langchainClient = new Client({ + autoBatchTracing: true, + callerOptions: { maxRetries: 0 }, + }); + const projectName = "__test_persist_update_run_batch"; + await deleteProject(langchainClient, projectName); + + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await langchainClient.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); + + await langchainClient.updateRun(runId, { + outputs: { output: ["Hi"] }, + dotted_order: dottedOrder, + trace_id: runId, + }); + await waitUntilRunFound(langchainClient, runId, true); + const storedRun = await langchainClient.readRun(runId); + 
expect(storedRun.id).toEqual(runId); + await langchainClient.deleteProject({ projectName }); +}); + +test("Test persist update runs above the batch size limit", async () => { + const langchainClient = new Client({ + autoBatchTracing: true, + callerOptions: { maxRetries: 0 }, + pendingAutoBatchedRunLimit: 2, + }); + const projectName = "__test_persist_update_run_batch"; + await deleteProject(langchainClient, projectName); + + const createRun = async () => { + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await langchainClient.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); + + await langchainClient.updateRun(runId, { + outputs: { output: ["Hi"] }, + dotted_order: dottedOrder, + trace_id: runId, + end_time: Math.floor(new Date().getTime() / 1000), + }); + await waitUntilRunFound(langchainClient, runId, true); + const storedRun = await langchainClient.readRun(runId); + expect(storedRun.id).toEqual(runId); + }; + + await Promise.all([createRun(), createRun(), createRun()]); + + await langchainClient.deleteProject({ projectName }); +}); + +test("Test persist update run with delay", async () => { + const langchainClient = new Client({ + autoBatchTracing: true, + callerOptions: { maxRetries: 0 }, + }); + const projectName = "__test_persist_update_run_batch"; + await deleteProject(langchainClient, projectName); + + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await langchainClient.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); + + await new Promise((resolve) => setTimeout(resolve, 1000)); + await langchainClient.updateRun(runId, { + outputs: { output: ["Hi"] }, + dotted_order: dottedOrder, + trace_id: runId, + end_time: Math.floor(new Date().getTime() / 1000), + }); + await waitUntilRunFound(langchainClient, runId, true); + const storedRun = await langchainClient.readRun(runId); + expect(storedRun.id).toEqual(runId); + await langchainClient.deleteProject({ projectName }); +}); diff --git a/js/src/tests/batch_client.test.ts b/js/src/tests/batch_client.test.ts new file mode 100644 index 000000000..48c4e7f06 --- /dev/null +++ b/js/src/tests/batch_client.test.ts @@ -0,0 +1,289 @@ +import { jest } from "@jest/globals"; +import { v4 as uuidv4 } from "uuid"; +import { Client } from "../client.js"; +import { convertToDottedOrderFormat } from "../run_trees.js"; + +describe("Batch client tracing", () => { + it("should create a batched run with the given input", async () => { + const client = new Client({ apiKey: "test-api-key" }); + const callSpy = jest + .spyOn((client as any).caller, "call") + .mockResolvedValue({ + ok: true, + text: () => "", + }); + const projectName = "__test_batch"; + + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await client.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); + + await new Promise((resolve) => setTimeout(resolve, 300)); + + const calledRequestParam: any = callSpy.mock.calls[0][2]; + expect(JSON.parse(calledRequestParam?.body)).toEqual({ + post: [ + 
expect.objectContaining({ + id: runId, + run_type: "llm", + inputs: { + text: "hello world", + }, + trace_id: runId, + dotted_order: dottedOrder, + }), + ], + patch: [], + }); + + expect(callSpy).toHaveBeenCalledWith( + fetch, + "https://api.smith.langchain.com/runs/batch", + expect.objectContaining({ body: expect.any(String) }) + ); + }); + + it("Create + update batching should merge into a single call", async () => { + const client = new Client({ apiKey: "test-api-key" }); + const callSpy = jest + .spyOn((client as any).caller, "call") + .mockResolvedValue({ + ok: true, + text: () => "", + }); + const projectName = "__test_batch"; + + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await client.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); + + const endTime = Math.floor(new Date().getTime() / 1000); + + await client.updateRun(runId, { + outputs: { output: ["Hi"] }, + dotted_order: dottedOrder, + trace_id: runId, + end_time: endTime, + }); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + const calledRequestParam: any = callSpy.mock.calls[0][2]; + expect(JSON.parse(calledRequestParam?.body)).toEqual({ + post: [ + expect.objectContaining({ + id: runId, + run_type: "llm", + inputs: { + text: "hello world", + }, + outputs: { + output: ["Hi"], + }, + end_time: endTime, + trace_id: runId, + dotted_order: dottedOrder, + }), + ], + patch: [], + }); + + expect(callSpy).toHaveBeenCalledWith( + fetch, + "https://api.smith.langchain.com/runs/batch", + expect.objectContaining({ body: expect.any(String) }) + ); + }); + + it("should create an example with the given input and generation", async () => { + const client = new Client({ apiKey: "test-api-key" }); + const callSpy = jest + .spyOn((client as any).caller, "call") + .mockResolvedValue({ + ok: true, + text: () => "", + }); + const projectName = "__test_batch"; + + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await client.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); + + await new Promise((resolve) => setTimeout(resolve, 300)); + + const endTime = Math.floor(new Date().getTime() / 1000); + + await client.updateRun(runId, { + outputs: { output: ["Hi"] }, + dotted_order: dottedOrder, + trace_id: runId, + end_time: endTime, + }); + + const runId2 = uuidv4(); + const dottedOrder2 = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId2 + ); + + await client.createRun({ + id: runId2, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world 2" }, + trace_id: runId2, + dotted_order: dottedOrder2, + }); + + await new Promise((resolve) => setTimeout(resolve, 300)); + + const calledRequestParam: any = callSpy.mock.calls[0][2]; + const calledRequestParam2: any = callSpy.mock.calls[1][2]; + expect(JSON.parse(calledRequestParam?.body)).toEqual({ + post: [ + expect.objectContaining({ + id: runId, + run_type: "llm", + inputs: { + text: "hello world", + }, + trace_id: runId, + dotted_order: dottedOrder, + }), + ], + patch: [], + }); + + expect(JSON.parse(calledRequestParam2?.body)).toEqual({ + post: [ + expect.objectContaining({ + id: runId2, + run_type: "llm", + inputs: { + text: 
"hello world 2", + }, + trace_id: runId2, + dotted_order: dottedOrder2, + }), + ], + patch: [ + expect.objectContaining({ + id: runId, + dotted_order: dottedOrder, + trace_id: runId, + end_time: endTime, + outputs: { + output: ["Hi"], + }, + }), + ], + }); + }); + + it("should send traces above the batch size and see even batches", async () => { + const client = new Client({ + apiKey: "test-api-key", + pendingAutoBatchedRunLimit: 10, + }); + const callSpy = jest + .spyOn((client as any).caller, "call") + .mockResolvedValue({ + ok: true, + text: () => "", + }); + const projectName = "__test_batch"; + + const runIds = await Promise.all( + [...Array(15)].map(async (_, i) => { + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await client.createRun({ + id: runId, + project_name: projectName, + name: "test_run " + i, + run_type: "llm", + inputs: { text: "hello world " + i }, + trace_id: runId, + dotted_order: dottedOrder, + }); + return runId; + }) + ); + + await new Promise((resolve) => setTimeout(resolve, 10)); + + const calledRequestParam: any = callSpy.mock.calls[0][2]; + // Second batch should still be pending + expect(callSpy.mock.calls[1]).toBeUndefined(); + // First batch should fire as soon as it hits 10 + expect(JSON.parse(calledRequestParam?.body)).toEqual({ + post: runIds.slice(0, 10).map((runId, i) => + expect.objectContaining({ + id: runId, + run_type: "llm", + inputs: { + text: "hello world " + i, + }, + trace_id: runId, + }) + ), + patch: [], + }); + + // Wait for the aggregation delay + await new Promise((resolve) => setTimeout(resolve, 100)); + + const calledRequestParam2: any = callSpy.mock.calls[1][2]; + + expect(JSON.parse(calledRequestParam2?.body)).toEqual({ + post: runIds.slice(10).map((runId, i) => + expect.objectContaining({ + id: runId, + run_type: "llm", + inputs: { + text: "hello world " + (i + 10), + }, + trace_id: runId, + }) + ), + patch: [], + }); + }); +}); diff --git a/js/src/tests/client.int.test.ts b/js/src/tests/client.int.test.ts index 4d8b371ca..257df3b08 100644 --- a/js/src/tests/client.int.test.ts +++ b/js/src/tests/client.int.test.ts @@ -77,7 +77,7 @@ async function waitUntilRunFound( // Test Dataset Creation, List, Read, Delete + upload CSV // Test Example Creation, List, Read, Update, Delete test.concurrent("Test LangSmith Client Dataset CRD", async () => { - const client = new Client({}); + const client = new Client({ autoBatchTracing: false }); const csvContent = `col1,col2,col3,col4\nval1,val2,val3,val4`; const blobData = new Blob([Buffer.from(csvContent)]); @@ -146,7 +146,7 @@ test.concurrent("Test LangSmith Client Dataset CRD", async () => { test.concurrent( "Test evaluate run", async () => { - const langchainClient = new Client({}); + const langchainClient = new Client({ autoBatchTracing: false }); const projectName = "__test_evaluate_run" + Date.now(); const datasetName = "__test_evaluate_run_dataset" + Date.now(); @@ -281,7 +281,7 @@ test.concurrent( ); test.concurrent("Test persist update run", async () => { - const langchainClient = new Client({}); + const langchainClient = new Client({ autoBatchTracing: false }); const projectName = "__test_persist_update_run"; await deleteProject(langchainClient, projectName); @@ -306,7 +306,7 @@ test.concurrent("Test persist update run", async () => { }); test.concurrent("test create dataset", async () => { - const langchainClient = new Client({}); + const langchainClient = new Client({ autoBatchTracing: false }); const datasetName = 
"__test_create_dataset"; const datasets = await toArray(langchainClient.listDatasets({ datasetName })); datasets.map(async (dataset: Dataset) => { @@ -330,7 +330,7 @@ test.concurrent("test create dataset", async () => { }); test.concurrent("Test share and unshare run", async () => { - const langchainClient = new Client({}); + const langchainClient = new Client({ autoBatchTracing: false }); // Create a new run const runId = uuidv4(); @@ -355,7 +355,7 @@ test.concurrent("Test share and unshare run", async () => { test.concurrent( "Test list datasets", async () => { - const langchainClient = new Client({}); + const langchainClient = new Client({ autoBatchTracing: false }); const datasetName1 = "___TEST dataset1"; const datasetName2 = "___TEST dataset2"; await deleteDataset(langchainClient, datasetName1); @@ -420,7 +420,7 @@ test.concurrent( test.concurrent( "Test create feedback with source run", async () => { - const langchainClient = new Client({}); + const langchainClient = new Client({ autoBatchTracing: false }); const projectName = "__test_create_feedback_with_source_run"; await deleteProject(langchainClient, projectName); const runId = uuidv4(); @@ -459,7 +459,11 @@ test.concurrent( test.concurrent( "Test create run with masked inputs/outputs", async () => { - const langchainClient = new Client({ hideInputs: true, hideOutputs: true }); + const langchainClient = new Client({ + hideInputs: true, + hideOutputs: true, + autoBatchTracing: false, + }); const projectName = "__test_create_run_with_masked_inputs_outputs"; await deleteProject(langchainClient, projectName); const runId = uuidv4(); @@ -507,7 +511,7 @@ test.concurrent( test.concurrent( "Test create run with revision id", async () => { - const langchainClient = new Client({}); + const langchainClient = new Client({ autoBatchTracing: false }); // eslint-disable-next-line no-process-env process.env.LANGCHAIN_REVISION_ID = "test_revision_id"; // eslint-disable-next-line no-process-env @@ -564,7 +568,7 @@ test.concurrent( describe("createChatExample", () => { it("should convert LangChainBaseMessage objects to examples", async () => { - const langchainClient = new Client({}); + const langchainClient = new Client({ autoBatchTracing: false }); const datasetName = "__createChatExample-test-dataset"; await deleteDataset(langchainClient, datasetName); @@ -619,7 +623,7 @@ describe("createChatExample", () => { test.concurrent( "Test getRunUrl with run", async () => { - const client = new Client({}); + const client = new Client({ autoBatchTracing: false }); const runId = uuidv4(); const run: Run = { id: runId, @@ -646,7 +650,7 @@ test.concurrent( test.concurrent( "Examples CRUD", async () => { - const client = new Client({}); + const client = new Client({ autoBatchTracing: false }); const datasetName = "__test_examples_crud"; await deleteDataset(client, datasetName); const dataset = await client.createDataset(datasetName); From da4595b88978dccf327f9af669062d21a7051a85 Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Wed, 7 Feb 2024 01:09:35 -0800 Subject: [PATCH 03/25] Enable auto batch (#391) Co-authored-by: William FH <13333726+hinthornw@users.noreply.github.com> --- python/langsmith/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/langsmith/client.py b/python/langsmith/client.py index 6bde27f0d..b51e0dafe 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -343,7 +343,7 @@ def __init__( timeout_ms: Optional[int] = None, web_url: Optional[str] = None, session: Optional[requests.Session] 
= None, - auto_batch_tracing: bool = False, + auto_batch_tracing: bool = True, ) -> None: """Initialize a Client instance. From b8973ee9cd8d26514dbd395123fbc0ca3afe2d3d Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Wed, 7 Feb 2024 01:15:43 -0800 Subject: [PATCH 04/25] Py=0.0.87, JS=0.0.67 (#409) --- js/package.json | 4 ++-- js/src/index.ts | 2 +- python/pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/js/package.json b/js/package.json index a98eec527..d2d08883f 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "langsmith", - "version": "0.0.66", + "version": "0.0.67", "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.", "packageManager": "yarn@1.22.19", "files": [ @@ -122,4 +122,4 @@ }, "./package.json": "./package.json" } -} +} \ No newline at end of file diff --git a/js/src/index.ts b/js/src/index.ts index 07befd8e3..10e7aec78 100644 --- a/js/src/index.ts +++ b/js/src/index.ts @@ -5,4 +5,4 @@ export { Dataset, Example, TracerSession, Run, Feedback } from "./schemas.js"; export { RunTree, RunTreeConfig } from "./run_trees.js"; // Update using yarn bump-version -export const __version__ = "0.0.66"; +export const __version__ = "0.0.67"; diff --git a/python/pyproject.toml b/python/pyproject.toml index 326039483..60e5f0634 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langsmith" -version = "0.0.86" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." authors = ["LangChain "] license = "MIT" From d8a68da9e6d46c75eddcb9163561df6c0120a9cf Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Wed, 7 Feb 2024 11:42:24 -0800 Subject: [PATCH 05/25] JS autobatch off (#410) --- js/package.json | 2 +- js/src/client.ts | 2 +- js/src/index.ts | 2 +- js/src/tests/batch_client.test.ts | 16 +++++++++++++--- 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/js/package.json b/js/package.json index d2d08883f..ad72d9095 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "langsmith", - "version": "0.0.67", + "version": "0.0.68", "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.", "packageManager": "yarn@1.22.19", "files": [ diff --git a/js/src/client.ts b/js/src/client.ts index 07ddbd108..b650642c0 100644 --- a/js/src/client.ts +++ b/js/src/client.ts @@ -240,7 +240,7 @@ export class Client { private sampledPostUuids = new Set(); - private autoBatchTracing = true; + private autoBatchTracing = false; private pendingAutoBatchedRuns: AutoBatchQueueItem[] = []; diff --git a/js/src/index.ts b/js/src/index.ts index 10e7aec78..79b99b853 100644 --- a/js/src/index.ts +++ b/js/src/index.ts @@ -5,4 +5,4 @@ export { Dataset, Example, TracerSession, Run, Feedback } from "./schemas.js"; export { RunTree, RunTreeConfig } from "./run_trees.js"; // Update using yarn bump-version -export const __version__ = "0.0.67"; +export const __version__ = "0.0.68"; diff --git a/js/src/tests/batch_client.test.ts b/js/src/tests/batch_client.test.ts index 48c4e7f06..36dab304f 100644 --- a/js/src/tests/batch_client.test.ts +++ b/js/src/tests/batch_client.test.ts @@ -5,7 +5,10 @@ import { convertToDottedOrderFormat } from "../run_trees.js"; describe("Batch client tracing", () => { it("should create a batched run with the given input", async () => { - const 
client = new Client({ apiKey: "test-api-key" }); + const client = new Client({ + apiKey: "test-api-key", + autoBatchTracing: true, + }); const callSpy = jest .spyOn((client as any).caller, "call") .mockResolvedValue({ @@ -55,7 +58,10 @@ describe("Batch client tracing", () => { }); it("Create + update batching should merge into a single call", async () => { - const client = new Client({ apiKey: "test-api-key" }); + const client = new Client({ + apiKey: "test-api-key", + autoBatchTracing: true, + }); const callSpy = jest .spyOn((client as any).caller, "call") .mockResolvedValue({ @@ -118,7 +124,10 @@ describe("Batch client tracing", () => { }); it("should create an example with the given input and generation", async () => { - const client = new Client({ apiKey: "test-api-key" }); + const client = new Client({ + apiKey: "test-api-key", + autoBatchTracing: true, + }); const callSpy = jest .spyOn((client as any).caller, "call") .mockResolvedValue({ @@ -218,6 +227,7 @@ describe("Batch client tracing", () => { const client = new Client({ apiKey: "test-api-key", pendingAutoBatchedRunLimit: 10, + autoBatchTracing: true, }); const callSpy = jest .spyOn((client as any).caller, "call") From c27e0fce969c5328b58a7c6394098ae8a2ca99d5 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Thu, 8 Feb 2024 18:35:49 -0800 Subject: [PATCH 06/25] Fix threadpool executor and batching (#412) --- python/README.md | 3 - python/langsmith/client.py | 26 ++- python/langsmith/run_helpers.py | 29 ++- python/langsmith/run_trees.py | 82 ++------ python/langsmith/schemas.py | 13 ++ python/tests/integration_tests/test_client.py | 71 ++++++- python/tests/integration_tests/test_runs.py | 14 +- .../integration_tests/wrappers/test_openai.py | 21 +- python/tests/unit_tests/test_client.py | 147 +++++++------- python/tests/unit_tests/test_run_helpers.py | 179 ++++++++++++------ python/tests/unit_tests/test_run_trees.py | 15 ++ 11 files changed, 362 insertions(+), 238 deletions(-) create mode 100644 python/tests/unit_tests/test_run_trees.py diff --git a/python/README.md b/python/README.md index cd6000770..aa5c22fa5 100644 --- a/python/README.md +++ b/python/README.md @@ -146,10 +146,7 @@ parent_run = RunTree( name="My Chat Bot", run_type="chain", inputs={"text": "Summarize this morning's meetings."}, - serialized={}, # Serialized representation of this chain # project_name= "Defaults to the LANGCHAIN_PROJECT env var" - # api_url= "Defaults to the LANGCHAIN_ENDPOINT env var" - # api_key= "Defaults to the LANGCHAIN_API_KEY env var" ) parent_run.post() # .. My Chat Bot calls an LLM diff --git a/python/langsmith/client.py b/python/langsmith/client.py index b51e0dafe..bba99f74e 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -300,10 +300,11 @@ def _hide_outputs(outputs: Dict[str, Any]) -> Dict[str, Any]: return outputs -def _as_uuid(value: ID_TYPE, var: str) -> uuid.UUID: +def _as_uuid(value: ID_TYPE, var: Optional[str] = None) -> uuid.UUID: try: return uuid.UUID(value) if not isinstance(value, uuid.UUID) else value except ValueError as e: + var = var or "value" raise ls_utils.LangSmithUserError( f"{var} must be a valid UUID or UUID string. 
Got {value}" ) from e @@ -890,16 +891,17 @@ def _filter_for_sampling( if patch: sampled = [] for run in runs: - if run["id"] in self._sampled_post_uuids: + run_id = _as_uuid(run["id"]) + if run_id in self._sampled_post_uuids: sampled.append(run) - self._sampled_post_uuids.remove(run["id"]) + self._sampled_post_uuids.remove(run_id) return sampled else: sampled = [] for run in runs: if random.random() < self.tracing_sample_rate: sampled.append(run) - self._sampled_post_uuids.add(run["id"]) + self._sampled_post_uuids.add(_as_uuid(run["id"])) return sampled def create_run( @@ -909,7 +911,7 @@ def create_run( run_type: str, *, project_name: Optional[str] = None, - revision_id: Optional[ID_TYPE] = None, + revision_id: Optional[str] = None, **kwargs: Any, ) -> None: """Persist a run to the LangSmith API. @@ -1060,7 +1062,7 @@ def batch_ingest_runs( return self._insert_runtime_env(body["post"]) - + logger.debug(f"Batch ingesting {len(body['post'])}, {len(body['patch'])} runs") self.request_with_retries( "post", f"{self.api_url}/runs/batch", @@ -1085,6 +1087,8 @@ def update_run( inputs: Optional[Dict] = None, outputs: Optional[Dict] = None, events: Optional[Sequence[dict]] = None, + extra: Optional[Dict] = None, + tags: Optional[List[str]] = None, **kwargs: Any, ) -> None: """Update a run in the LangSmith API. @@ -1103,6 +1107,10 @@ def update_run( The output values for the run. events : Sequence[dict] or None, default=None The events for the run. + extra : Dict or None, default=None + The extra information for the run. + tags : List[str] or None, default=None + The tags for the run. **kwargs : Any Kwargs are ignored. """ @@ -1116,11 +1124,15 @@ def update_run( "trace_id": kwargs.pop("trace_id", None), "parent_run_id": kwargs.pop("parent_run_id", None), "dotted_order": kwargs.pop("dotted_order", None), + "tags": tags, + "extra": extra, } if not self._filter_for_sampling([data], patch=True): return if end_time is not None: data["end_time"] = end_time.isoformat() + else: + data["end_time"] = datetime.datetime.utcnow().isoformat() if error is not None: data["error"] = error if inputs is not None: @@ -3294,6 +3306,7 @@ def _tracing_thread_handle_batch( try: client.batch_ingest_runs(create=create, update=update, pre_sampled=True) except Exception: + logger.error("Error in tracing thread", exc_info=True) # exceptions are logged elsewhere, but we need to make sure the # background thread continues to run pass @@ -3332,7 +3345,6 @@ def _tracing_control_thread_func(client_ref: weakref.ref[Client]) -> None: return try: if not client.info: - print(f"no info: {client.info}", file=sys.stderr, flush=True) return except BaseException as e: logger.debug("Error in tracing control thread: %s", e) diff --git a/python/langsmith/run_helpers.py b/python/langsmith/run_helpers.py index bd189f1ac..e7c34d836 100644 --- a/python/langsmith/run_helpers.py +++ b/python/langsmith/run_helpers.py @@ -1,4 +1,5 @@ """Decorator for creating a run tree from functions.""" + from __future__ import annotations import contextlib @@ -8,7 +9,7 @@ import logging import traceback import uuid -from concurrent import futures +import warnings from typing import ( TYPE_CHECKING, Any, @@ -142,7 +143,6 @@ def _setup_run( extra_outer: dict, langsmith_extra: Optional[LangSmithExtra] = None, name: Optional[str] = None, - executor: Optional[futures.ThreadPoolExecutor] = None, metadata: Optional[Mapping[str, Any]] = None, tags: Optional[List[str]] = None, client: Optional[client.Client] = None, @@ -219,7 +219,6 @@ def _setup_run( 
project_name=project_name_, extra=extra_inner, tags=tags_, - executor=executor, client=client_, ) @@ -262,7 +261,6 @@ def traceable( run_type: str = "chain", *, name: Optional[str] = None, - executor: Optional[futures.ThreadPoolExecutor] = None, metadata: Optional[Mapping[str, Any]] = None, tags: Optional[List[str]] = None, client: Optional[client.Client] = None, @@ -282,8 +280,6 @@ def traceable( run_type: The type of run to create. Examples: llm, chain, tool, prompt, retriever, etc. Defaults to "chain". name: The name of the run. Defaults to the function name. - executor: The thread pool executor to use for the run. Defaults to None, - which will use the default executor. metadata: The metadata to add to the run. Defaults to None. tags: The tags to add to the run. Defaults to None. client: The client to use for logging the run to LangSmith. Defaults to @@ -302,7 +298,6 @@ def traceable( ) extra_outer = kwargs.get("extra") or {} name = kwargs.get("name") - executor = kwargs.get("executor") metadata = kwargs.get("metadata") tags = kwargs.get("tags") client = kwargs.get("client") @@ -323,7 +318,6 @@ async def async_wrapper( langsmith_extra=langsmith_extra, extra_outer=extra_outer, name=name, - executor=executor, metadata=metadata, tags=tags, client=client, @@ -363,7 +357,6 @@ async def async_generator_wrapper( langsmith_extra=langsmith_extra, extra_outer=extra_outer, name=name, - executor=executor, metadata=metadata, tags=tags, client=client, @@ -430,7 +423,6 @@ def wrapper( langsmith_extra=langsmith_extra, extra_outer=extra_outer, name=name, - executor=executor, metadata=metadata, tags=tags, client=client, @@ -470,14 +462,12 @@ def generator_wrapper( langsmith_extra=langsmith_extra, extra_outer=extra_outer, name=name, - executor=executor, metadata=metadata, tags=tags, client=client, args=args, kwargs=kwargs, ) - func_accepts_parent_run = ( inspect.signature(func).parameters.get("run_tree", None) is not None ) @@ -494,7 +484,10 @@ def generator_wrapper( generator_result = func(*args, **kwargs) for item in generator_result: results.append(item) - yield item + try: + yield item + except GeneratorExit: + break except BaseException as e: stacktrace = traceback.format_exc() _container_end(run_container, error=stacktrace) @@ -546,13 +539,20 @@ def trace( *, inputs: Optional[Dict] = None, extra: Optional[Dict] = None, - executor: Optional[futures.ThreadPoolExecutor] = None, project_name: Optional[str] = None, run_tree: Optional[run_trees.RunTree] = None, tags: Optional[List[str]] = None, metadata: Optional[Mapping[str, Any]] = None, + **kwargs: Any, ) -> Generator[run_trees.RunTree, None, None]: """Context manager for creating a run tree.""" + if kwargs: + # In case someone was passing an executor before. 
+ warnings.warn( + "The `trace` context manager no longer supports the following kwargs: " + f"{sorted(kwargs.keys())}.", + DeprecationWarning, + ) outer_tags = _TAGS.get() outer_metadata = _METADATA.get() outer_project = _PROJECT_NAME.get() or utils.get_tracer_project() @@ -581,7 +581,6 @@ def trace( name=name, run_type=run_type, extra=extra_outer, - executor=executor, project_name=project_name_, inputs=inputs or {}, tags=tags_, diff --git a/python/langsmith/run_trees.py b/python/langsmith/run_trees.py index 53bd7d1ad..3769e98ac 100644 --- a/python/langsmith/run_trees.py +++ b/python/langsmith/run_trees.py @@ -1,22 +1,20 @@ """Schemas for the LangSmith API.""" + from __future__ import annotations import logging -import warnings -from concurrent.futures import Future, ThreadPoolExecutor, wait from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, cast +from typing import Dict, List, Optional, cast from uuid import UUID, uuid4 try: from pydantic.v1 import ( # type: ignore[import] Field, - PrivateAttr, root_validator, validator, ) except ImportError: - from pydantic import Field, PrivateAttr, root_validator, validator + from pydantic import Field, root_validator, validator from langsmith import utils from langsmith.client import ID_TYPE, Client @@ -25,16 +23,12 @@ logger = logging.getLogger(__name__) -def _make_thread_pool() -> ThreadPoolExecutor: - """Ensure a thread pool exists in the current context.""" - return ThreadPoolExecutor(max_workers=1) - - class RunTree(RunBase): """Run Schema with back-references for posting runs.""" name: str id: UUID = Field(default_factory=uuid4) + run_type: str = Field(default="chain") start_time: datetime = Field(default_factory=datetime.utcnow) parent_run: Optional[RunTree] = Field(default=None, exclude=True) child_runs: List[RunTree] = Field( @@ -48,10 +42,6 @@ class RunTree(RunBase): session_id: Optional[UUID] = Field(default=None, alias="project_id") extra: Dict = Field(default_factory=dict) client: Client = Field(default_factory=Client, exclude=True) - executor: ThreadPoolExecutor = Field( - default_factory=_make_thread_pool, exclude=True - ) - _futures: List[Future] = PrivateAttr(default_factory=list) dotted_order: str = Field( default="", description="The order of the run in the tree." 
) @@ -60,15 +50,7 @@ class RunTree(RunBase): class Config: arbitrary_types_allowed = True allow_population_by_field_name = True - - @validator("executor", pre=True) - def validate_executor(cls, v: Optional[ThreadPoolExecutor]) -> ThreadPoolExecutor: - """Ensure the executor is running.""" - if v is None: - return _make_thread_pool() - if v._shutdown: - raise ValueError("Executor has been shutdown.") - return v + extra = "allow" @validator("client", pre=True) def validate_client(cls, v: Optional[Client]) -> Client: @@ -159,19 +141,11 @@ def create_child( parent_run=self, session_name=self.session_name, client=self.client, - executor=self.executor, tags=tags, ) self.child_runs.append(run) return run - def _execute(self, func: Callable, *args: Any, **kwargs: Any) -> Any: - try: - return func(*args, **kwargs) - except Exception as e: - logger.exception(e) - raise e - def _get_dicts_safe(self): try: return self.dict(exclude={"child_runs"}, exclude_none=True) @@ -188,45 +162,27 @@ def _get_dicts_safe(self): self_dict["outputs"] = self.outputs.copy() return self_dict - def post(self, exclude_child_runs: bool = True) -> Future: + def post(self, exclude_child_runs: bool = True) -> None: """Post the run tree to the API asynchronously.""" kwargs = self._get_dicts_safe() - self._futures.append( - self.executor.submit( - self._execute, - self.client.create_run, - **kwargs, - ) - ) + self.client.create_run(**kwargs) if not exclude_child_runs: - warnings.warn( - "Posting with exclude_child_runs=False is deprecated" - " and will be removed in a future version.", - DeprecationWarning, - ) for child_run in self.child_runs: - self._futures.append(child_run.post(exclude_child_runs=False)) - return self._futures[-1] + child_run.post(exclude_child_runs=False) - def patch(self) -> Future: + def patch(self) -> None: """Patch the run tree to the API in a background thread.""" - self._futures.append( - self.executor.submit( - self._execute, - self.client.update_run, - run_id=self.id, - outputs=self.outputs.copy() if self.outputs else None, - error=self.error, - parent_run_id=self.parent_run_id, - reference_example_id=self.reference_example_id, - end_time=self.end_time, - ) + self.client.update_run( + run_id=self.id, + outputs=self.outputs.copy() if self.outputs else None, + error=self.error, + parent_run_id=self.parent_run_id, + reference_example_id=self.reference_example_id, + end_time=self.end_time, + dotted_order=self.dotted_order, + trace_id=self.trace_id, ) - return self._futures[-1] def wait(self) -> None: """Wait for all _futures to complete.""" - futures = self._futures - wait(self._futures) - for future in futures: - self._futures.remove(future) + pass diff --git a/python/langsmith/schemas.py b/python/langsmith/schemas.py index 7ca4c8baf..de130eb9f 100644 --- a/python/langsmith/schemas.py +++ b/python/langsmith/schemas.py @@ -1,4 +1,5 @@ """Schemas for the LangSmith API.""" + from __future__ import annotations from datetime import datetime, timedelta @@ -290,6 +291,18 @@ def url(self) -> Optional[str]: return f"{self._host_url}{self.app_path}" return None + @property + def metadata(self) -> dict[str, Any]: + """Retrieve the metadata (if any).""" + if self.extra is None or "metadata" not in self.extra: + return {} + return self.extra["metadata"] + + @property + def revision_id(self) -> Optional[UUID]: + """Retrieve the revision ID (if any).""" + return self.metadata.get("revision_id") + class RunLikeDict(TypedDict, total=False): """Run-like dictionary, for type-hinting.""" diff --git 
a/python/tests/integration_tests/test_client.py b/python/tests/integration_tests/test_client.py index 369861952..e2196d4f0 100644 --- a/python/tests/integration_tests/test_client.py +++ b/python/tests/integration_tests/test_client.py @@ -1,12 +1,13 @@ """LangSmith langchain_client Integration Tests.""" +import functools import io import os import random import string import time from datetime import datetime, timedelta -from typing import cast +from typing import Any, Callable, Dict, cast from uuid import uuid4 import pytest @@ -21,6 +22,18 @@ ) +def wait_for( + condition: Callable[[], bool], max_attempts: int = 20, sleep_time: int = 3 +): + for _ in range(max_attempts): + try: + if condition(): + return + except Exception: + time.sleep(sleep_time) + raise ValueError("Callable did not return in time") + + @pytest.fixture def langchain_client(monkeypatch: pytest.MonkeyPatch) -> Client: monkeypatch.setenv("LANGCHAIN_ENDPOINT", "https://api.smith.langchain.com") @@ -428,3 +441,59 @@ def test_get_info() -> None: assert info.version is not None # type: ignore assert info.batch_ingest_config is not None # type: ignore assert info.batch_ingest_config["size_limit"] > 0 # type: ignore + + +@pytest.mark.parametrize("add_metadata", [True, False]) +@pytest.mark.parametrize("do_batching", [True, False]) +def test_update_run_extra(add_metadata: bool, do_batching: bool) -> None: + langchain_client = Client() + run_id = uuid4() + run: Dict[str, Any] = { + "id": run_id, + "name": "run 1", + "start_time": datetime.utcnow(), + "run_type": "chain", + "inputs": {"input1": 1, "input2": 2}, + "outputs": {"output1": 3, "output2": 4}, + "extra": { + "metadata": { + "foo": "bar", + } + }, + "tags": ["tag1", "tag2"], + } + if do_batching: + run["trace_id"] = run_id + dotted_order = run["start_time"].strftime("%Y%m%dT%H%M%S%fZ") + str(run_id) # type: ignore + run["dotted_order"] = dotted_order + revision_id = uuid4() + langchain_client.create_run(**run, revision_id=revision_id) # type: ignore + + def _get_run(has_end: bool = False) -> bool: + try: + r = langchain_client.read_run(run["id"]) # type: ignore + if has_end: + return r.end_time is not None + return True + except LangSmithError: + return False + + wait_for(_get_run) + created_run = langchain_client.read_run(run_id) + assert created_run.metadata["foo"] == "bar" + assert created_run.metadata["revision_id"] == str(revision_id) + # Update the run + if add_metadata: + run["extra"]["metadata"]["foo2"] = "baz" # type: ignore + run["tags"] = ["tag3"] + langchain_client.update_run(run_id, **run) # type: ignore + wait_for(functools.partial(_get_run, has_end=True)) + updated_run = langchain_client.read_run(run_id) + assert updated_run.metadata["foo"] == "bar", updated_run.metadata # type: ignore + assert updated_run.revision_id == str(revision_id), updated_run.metadata + if add_metadata: + assert updated_run.metadata["foo2"] == "baz", updated_run.metadata # type: ignore + assert updated_run.tags == ["tag3"] + else: + assert updated_run.tags == ["tag1", "tag2"] + assert updated_run.extra["runtime"] == created_run.extra["runtime"] # type: ignore diff --git a/python/tests/integration_tests/test_runs.py b/python/tests/integration_tests/test_runs.py index 20b58a251..b23451c76 100644 --- a/python/tests/integration_tests/test_runs.py +++ b/python/tests/integration_tests/test_runs.py @@ -55,6 +55,8 @@ def test_nested_runs( if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) + # We don't require this anymore but we'll use it 
+ # to keep testing for backwards compatibility executor = ThreadPoolExecutor(max_workers=1) @traceable(run_type="chain") @@ -66,7 +68,7 @@ def my_run(text: str): def my_llm_run(text: str): return f"Completed: {text}" - @traceable(run_type="chain", executor=executor, tags=["foo", "bar"]) + @traceable(run_type="chain", executor=executor, tags=["foo", "bar"]) # type: ignore def my_chain_run(text: str): return my_run(text) @@ -120,7 +122,7 @@ async def my_llm_run(text: str): def my_sync_tool(text: str, *, my_arg: int = 10): return f"Completed: {text} {my_arg}" - @traceable(run_type="chain", executor=executor) + @traceable(run_type="chain") # type: ignore async def my_chain_run(text: str): return await my_run(text) @@ -179,7 +181,7 @@ def my_run(text: str, *, run_tree: Optional[RunTree] = None): executor = ThreadPoolExecutor(max_workers=1) - @traceable(run_type="chain", executor=executor) + @traceable(run_type="chain", executor=executor) # type: ignore async def my_chain_run(text: str, run_tree: RunTree): thread_pool = ThreadPoolExecutor(max_workers=3) for i in range(2): @@ -226,10 +228,7 @@ async def test_context_manager(langchain_client: Client) -> None: async def my_llm(prompt: str) -> str: return f"LLM {prompt}" - executor = ThreadPoolExecutor(max_workers=1) - with trace( - "my_context", "chain", project_name=project_name, executor=executor - ) as run_tree: + with trace("my_context", "chain", project_name=project_name) as run_tree: await my_llm("foo") with trace("my_context2", "chain", run_tree=run_tree) as run_tree2: runs = [my_llm("baz"), my_llm("qux")] @@ -238,7 +237,6 @@ async def my_llm(prompt: str) -> str: await my_llm("corge") await asyncio.gather(*runs) run_tree.end(outputs={"End val": "my_context2"}) - executor.shutdown(wait=True) poll_runs_until_count(langchain_client, project_name, 8) runs = list(langchain_client.list_runs(project_name=project_name)) assert len(runs) == 8 diff --git a/python/tests/integration_tests/wrappers/test_openai.py b/python/tests/integration_tests/wrappers/test_openai.py index 46cc988ac..da41ed67d 100644 --- a/python/tests/integration_tests/wrappers/test_openai.py +++ b/python/tests/integration_tests/wrappers/test_openai.py @@ -41,9 +41,8 @@ def test_chat_sync_api(mock_session: mock.MagicMock, stream: bool): assert original.choices == patched.choices # Give the thread a chance. time.sleep(0.01) - assert mock_session.return_value.request.call_count == 2 - assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "POST" - assert mock_session.return_value.request.call_args_list[1][0][0].upper() == "PATCH" + for call in mock_session.return_value.request.call_args_list: + assert call[0][0].upper() == "POST" @mock.patch("langsmith.client.requests.Session") @@ -76,9 +75,8 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): assert original.choices == patched.choices # Give the thread a chance. time.sleep(0.1) - assert mock_session.return_value.request.call_count == 2 - assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "POST" - assert mock_session.return_value.request.call_args_list[1][0][0].upper() == "PATCH" + for call in mock_session.return_value.request.call_args_list: + assert call[0][0].upper() == "POST" @mock.patch("langsmith.client.requests.Session") @@ -117,9 +115,8 @@ def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): assert original.choices == patched.choices # Give the thread a chance. 
time.sleep(0.1) - assert mock_session.return_value.request.call_count == 2 - assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "POST" - assert mock_session.return_value.request.call_args_list[1][0][0].upper() == "PATCH" + for call in mock_session.return_value.request.call_args_list: + assert call[0][0].upper() == "POST" @mock.patch("langsmith.client.requests.Session") @@ -162,6 +159,6 @@ async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool) assert original.choices == patched.choices # Give the thread a chance. time.sleep(0.1) - assert mock_session.return_value.request.call_count == 2 - assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "POST" - assert mock_session.return_value.request.call_args_list[1][0][0].upper() == "PATCH" + assert mock_session.return_value.request.call_count >= 1 + for call in mock_session.return_value.request.call_args_list: + assert call[0][0].upper() == "POST" diff --git a/python/tests/unit_tests/test_client.py b/python/tests/unit_tests/test_client.py index 19e264fd6..da7c5b24c 100644 --- a/python/tests/unit_tests/test_client.py +++ b/python/tests/unit_tests/test_client.py @@ -189,7 +189,6 @@ def test_get_api_url() -> None: def test_create_run_unicode() -> None: - client = Client(api_url="http://localhost:1984", api_key="123") inputs = { "foo": "これは私の友達です", "bar": "این یک کتاب است", @@ -199,10 +198,10 @@ def test_create_run_unicode() -> None: } session = mock.Mock() session.request = mock.Mock() - with patch.object(client, "session", session): - id_ = uuid.uuid4() - client.create_run("my_run", inputs=inputs, run_type="llm", id=id_) - client.update_run(id_, status="completed") + client = Client(api_url="http://localhost:1984", api_key="123", session=session) + id_ = uuid.uuid4() + client.create_run("my_run", inputs=inputs, run_type="llm", id=id_) + client.update_run(id_, status="completed") class CallTracker: @@ -420,8 +419,8 @@ def mock_get(*args, **kwargs): @pytest.mark.parametrize("source_type", ["api", "model"]) def test_create_feedback_string_source_type(source_type: str) -> None: - client = Client(api_url="http://localhost:1984", api_key="123") session = mock.Mock() + client = Client(api_url="http://localhost:1984", api_key="123", session=session) request_object = mock.Mock() request_object.json.return_value = { "id": uuid.uuid4(), @@ -431,13 +430,12 @@ def test_create_feedback_string_source_type(source_type: str) -> None: "run_id": uuid.uuid4(), } session.post.return_value = request_object - with patch.object(client, "session", session): - id_ = uuid.uuid4() - client.create_feedback( - id_, - key="Foo", - feedback_source_type=source_type, - ) + id_ = uuid.uuid4() + client.create_feedback( + id_, + key="Foo", + feedback_source_type=source_type, + ) def test_pydantic_serialize() -> None: @@ -623,7 +621,8 @@ class MyNamedTuple(NamedTuple): assert res[k] == v -def test_host_url() -> None: +@patch("langsmith.client.requests.Session", autospec=True) +def test_host_url(_: MagicMock) -> None: client = Client(api_url="https://api.foobar.com/api", api_key="API_KEY") assert client._host_url == "https://api.foobar.com" @@ -645,85 +644,81 @@ def test_host_url() -> None: @patch("langsmith.client.time.sleep") -def test_retry_on_connection_error(mock_sleep): - client = Client(api_key="test") - with patch.object(client, "session") as mock_session: - mock_session.request.side_effect = requests.ConnectionError() - - with pytest.raises(ls_utils.LangSmithConnectionError): - client.request_with_retries( 
- "GET", "https://test.url", {}, stop_after_attempt=2 - ) - assert mock_session.request.call_count == 2 +def test_retry_on_connection_error(mock_sleep: MagicMock): + mock_session = MagicMock() + client = Client(api_key="test", session=mock_session) + mock_session.request.side_effect = requests.ConnectionError() + + with pytest.raises(ls_utils.LangSmithConnectionError): + client.request_with_retries("GET", "https://test.url", {}, stop_after_attempt=2) + assert mock_session.request.call_count == 2 @patch("langsmith.client.time.sleep") def test_http_status_500_handling(mock_sleep): - client = Client(api_key="test") - with patch.object(client, "session") as mock_session: - mock_response = MagicMock() - mock_response.status_code = 500 - mock_response.raise_for_status.side_effect = HTTPError() - mock_session.request.return_value = mock_response - - with pytest.raises(ls_utils.LangSmithAPIError): - client.request_with_retries( - "GET", "https://test.url", {}, stop_after_attempt=2 - ) - assert mock_session.request.call_count == 2 + mock_session = MagicMock() + client = Client(api_key="test", session=mock_session) + mock_response = MagicMock() + mock_response.status_code = 500 + mock_response.raise_for_status.side_effect = HTTPError() + mock_session.request.return_value = mock_response + + with pytest.raises(ls_utils.LangSmithAPIError): + client.request_with_retries("GET", "https://test.url", {}, stop_after_attempt=2) + assert mock_session.request.call_count == 2 @patch("langsmith.client.time.sleep") def test_pass_on_409_handling(mock_sleep): - client = Client(api_key="test") - with patch.object(client, "session") as mock_session: - mock_response = MagicMock() - mock_response.status_code = 409 - mock_response.raise_for_status.side_effect = HTTPError() - mock_session.request.return_value = mock_response - - response = client.request_with_retries( - "GET", - "https://test.url", - {}, - stop_after_attempt=5, - to_ignore=[ls_utils.LangSmithConflictError], - ) - assert mock_session.request.call_count == 1 - assert response == mock_response + mock_session = MagicMock() + client = Client(api_key="test", session=mock_session) + mock_response = MagicMock() + mock_response.status_code = 409 + mock_response.raise_for_status.side_effect = HTTPError() + mock_session.request.return_value = mock_response + + response = client.request_with_retries( + "GET", + "https://test.url", + {}, + stop_after_attempt=5, + to_ignore=[ls_utils.LangSmithConflictError], + ) + assert mock_session.request.call_count == 1 + assert response == mock_response @patch("langsmith.client.ls_utils.raise_for_status_with_text") def test_http_status_429_handling(mock_raise_for_status): - client = Client(api_key="test") - with patch.object(client, "session") as mock_session: - mock_response = MagicMock() - mock_response.status_code = 429 - mock_session.request.return_value = mock_response - mock_raise_for_status.side_effect = HTTPError() - with pytest.raises(ls_utils.LangSmithRateLimitError): - client.request_with_retries("GET", "https://test.url", {}) + mock_session = MagicMock() + client = Client(api_key="test", session=mock_session) + mock_response = MagicMock() + mock_response.status_code = 429 + mock_session.request.return_value = mock_response + mock_raise_for_status.side_effect = HTTPError() + with pytest.raises(ls_utils.LangSmithRateLimitError): + client.request_with_retries("GET", "https://test.url", {}) @patch("langsmith.client.ls_utils.raise_for_status_with_text") def test_http_status_401_handling(mock_raise_for_status): - client = 
Client(api_key="test") - with patch.object(client, "session") as mock_session: - mock_response = MagicMock() - mock_response.status_code = 401 - mock_session.request.return_value = mock_response - mock_raise_for_status.side_effect = HTTPError() - with pytest.raises(ls_utils.LangSmithAuthError): - client.request_with_retries("GET", "https://test.url", {}) + mock_session = MagicMock() + client = Client(api_key="test", session=mock_session) + mock_response = MagicMock() + mock_response.status_code = 401 + mock_session.request.return_value = mock_response + mock_raise_for_status.side_effect = HTTPError() + with pytest.raises(ls_utils.LangSmithAuthError): + client.request_with_retries("GET", "https://test.url", {}) @patch("langsmith.client.ls_utils.raise_for_status_with_text") def test_http_status_404_handling(mock_raise_for_status): - client = Client(api_key="test") - with patch.object(client, "session") as mock_session: - mock_response = MagicMock() - mock_response.status_code = 404 - mock_session.request.return_value = mock_response - mock_raise_for_status.side_effect = HTTPError() - with pytest.raises(ls_utils.LangSmithNotFoundError): - client.request_with_retries("GET", "https://test.url", {}) + mock_session = MagicMock() + client = Client(api_key="test", session=mock_session) + mock_response = MagicMock() + mock_response.status_code = 404 + mock_session.request.return_value = mock_response + mock_raise_for_status.side_effect = HTTPError() + with pytest.raises(ls_utils.LangSmithNotFoundError): + client.request_with_retries("GET", "https://test.url", {}) diff --git a/python/tests/unit_tests/test_run_helpers.py b/python/tests/unit_tests/test_run_helpers.py index 3d9331d75..1a21fabae 100644 --- a/python/tests/unit_tests/test_run_helpers.py +++ b/python/tests/unit_tests/test_run_helpers.py @@ -1,7 +1,15 @@ +import asyncio import functools import inspect +import json +import os +import time from typing import Any +from unittest.mock import MagicMock, patch +import pytest + +from langsmith import Client from langsmith.run_helpers import ( _get_inputs, as_runnable, @@ -149,26 +157,82 @@ def foo(kwargs: int, *, b: int, c: int, **some_other_kwargs: Any) -> None: } -def test_traceable_iterator() -> None: - @traceable() - def my_iterator_fn(a, b, d): - for i in range(a + b + d): - yield i - - assert list(my_iterator_fn(1, 2, 3)) == [0, 1, 2, 3, 4, 5] - - -async def test_traceable_async_iterator() -> None: - @traceable() - async def my_iterator_fn(a, b, d): - for i in range(a + b + d): - yield i - - assert [i async for i in my_iterator_fn(1, 2, 3)] == [0, 1, 2, 3, 4, 5] - - -def test_traceable_iterator_noargs() -> None: - # Check that it's callable without the parens +@pytest.fixture +def mock_client() -> Client: + mock_session = MagicMock() + client = Client(session=mock_session, api_key="test") + return client + + +@pytest.mark.parametrize("use_next", [True, False]) +def test_traceable_iterator(use_next: bool, mock_client: Client) -> None: + with patch.dict(os.environ, {"LANGCHAIN_TRACING_V2": "true"}): + + @traceable(client=mock_client) + def my_iterator_fn(a, b, d): + for i in range(a + b + d): + yield i + + expected = [0, 1, 2, 3, 4, 5] + genout = my_iterator_fn(1, 2, 3) + if use_next: + results = [] + while True: + try: + results.append(next(genout)) + except StopIteration: + break + else: + results = list(genout) + assert results == expected + # Wait for batcher + time.sleep(0.1) + # check the mock_calls + mock_calls = mock_client.session.request.mock_calls # type: ignore + assert 1 <= 
len(mock_calls) <= 2 + + call = mock_calls[0] + assert call.args[0] == "post" + assert call.args[1].startswith("https://api.smith.langchain.com") + body = json.loads(mock_calls[0].kwargs["data"]) + assert body["post"] + assert body["post"][0]["outputs"]["output"] == expected + + +@pytest.mark.parametrize("use_next", [True, False]) +async def test_traceable_async_iterator(use_next: bool, mock_client: Client) -> None: + with patch.dict(os.environ, {"LANGCHAIN_TRACING_V2": "true"}): + + @traceable(client=mock_client) + async def my_iterator_fn(a, b, d): + for i in range(a + b + d): + yield i + + expected = [0, 1, 2, 3, 4, 5] + genout = my_iterator_fn(1, 2, 3) + if use_next: + results = [] + async for item in genout: + results.append(item) + else: + results = [item async for item in genout] + assert results == expected + # Wait for batcher + await asyncio.sleep(0.1) + # check the mock_calls + mock_calls = mock_client.session.request.mock_calls # type: ignore + assert 1 <= len(mock_calls) <= 2 + + call = mock_calls[0] + assert call.args[0] == "post" + assert call.args[1].startswith("https://api.smith.langchain.com") + body = json.loads(call.kwargs["data"]) + assert body["post"] + assert body["post"][0]["outputs"]["output"] == expected + + +@patch("langsmith.run_trees.Client", autospec=True) +def test_traceable_iterator_noargs(_: MagicMock) -> None: @traceable def my_iterator_fn(a, b, d): for i in range(a + b + d): @@ -177,7 +241,8 @@ def my_iterator_fn(a, b, d): assert list(my_iterator_fn(1, 2, 3)) == [0, 1, 2, 3, 4, 5] -async def test_traceable_async_iterator_noargs() -> None: +@patch("langsmith.run_trees.Client", autospec=True) +async def test_traceable_async_iterator_noargs(_: MagicMock) -> None: # Check that it's callable without the parens @traceable async def my_iterator_fn(a, b, d): @@ -187,64 +252,72 @@ async def my_iterator_fn(a, b, d): assert [i async for i in my_iterator_fn(1, 2, 3)] == [0, 1, 2, 3, 4, 5] -def test_as_runnable() -> None: - @traceable() +@patch("langsmith.client.requests.Session", autospec=True) +def test_as_runnable(_: MagicMock, mock_client: Client) -> None: + @traceable(client=mock_client) def my_function(a, b, d): return a + b + d - runnable = as_runnable(my_function) - assert runnable.invoke({"a": 1, "b": 2, "d": 3}) == 6 + with patch.dict(os.environ, {"LANGCHAIN_TRACING_V2": "false"}): + runnable = as_runnable(my_function) + assert runnable.invoke({"a": 1, "b": 2, "d": 3}) == 6 -def test_as_runnable_batch() -> None: - @traceable() +@patch("langsmith.client.requests.Session", autospec=True) +def test_as_runnable_batch(mock_client: Client) -> None: + @traceable(client=mock_client) def my_function(a, b, d): return a + b + d - runnable = as_runnable(my_function) - assert runnable.batch( - [ - {"a": 1, "b": 2, "d": 3}, - {"a": 1, "b": 2, "d": 4}, - ] - ) == [6, 7] + with patch.dict(os.environ, {"LANGCHAIN_TRACING_V2": "false"}): + runnable = as_runnable(my_function) + assert runnable.batch( + [ + {"a": 1, "b": 2, "d": 3}, + {"a": 1, "b": 2, "d": 4}, + ] + ) == [6, 7] -async def test_as_runnable_async() -> None: +@patch("langsmith.client.requests.Session", autospec=True) +async def test_as_runnable_async(_: MagicMock) -> None: @traceable() async def my_function(a, b, d): return a + b + d runnable = as_runnable(my_function) - result = await runnable.ainvoke({"a": 1, "b": 2, "d": 3}) - assert result == 6 + with patch.dict(os.environ, {"LANGCHAIN_TRACING_V2": "false"}): + result = await runnable.ainvoke({"a": 1, "b": 2, "d": 3}) + assert result == 6 -async def 
test_as_runnable_async_batch() -> None: +@patch("langsmith.client.requests.Session", autospec=True) +async def test_as_runnable_async_batch(_: MagicMock) -> None: @traceable() async def my_function(a, b, d): return a + b + d runnable = as_runnable(my_function) - result = await runnable.abatch( - [ - {"a": 1, "b": 2, "d": 3}, - {"a": 1, "b": 2, "d": 4}, - ] - ) - assert result == [6, 7] - - -def test_is_traceable_function() -> None: - @traceable() + with patch.dict(os.environ, {"LANGCHAIN_TRACING_V2": "false"}): + result = await runnable.abatch( + [ + {"a": 1, "b": 2, "d": 3}, + {"a": 1, "b": 2, "d": 4}, + ] + ) + assert result == [6, 7] + + +def test_is_traceable_function(mock_client: Client) -> None: + @traceable(client=mock_client) def my_function(a: int, b: int, d: int) -> int: return a + b + d assert is_traceable_function(my_function) -def test_is_traceable_partial_function() -> None: - @traceable() +def test_is_traceable_partial_function(mock_client: Client) -> None: + @traceable(client=mock_client) def my_function(a: int, b: int, d: int) -> int: return a + b + d @@ -260,9 +333,9 @@ def my_function(a: int, b: int, d: int) -> int: assert not is_traceable_function(my_function) -def test_is_traceable_class_call() -> None: +def test_is_traceable_class_call(mock_client: Client) -> None: class Foo: - @traceable() + @traceable(client=mock_client) def __call__(self, a: int, b: int) -> None: pass diff --git a/python/tests/unit_tests/test_run_trees.py b/python/tests/unit_tests/test_run_trees.py new file mode 100644 index 000000000..fa88e623c --- /dev/null +++ b/python/tests/unit_tests/test_run_trees.py @@ -0,0 +1,15 @@ +from concurrent.futures import ThreadPoolExecutor +from unittest.mock import MagicMock + +from langsmith import run_trees +from langsmith.client import Client + + +def test_run_tree_accepts_tpe() -> None: + mock_client = MagicMock(spec=Client) + run_trees.RunTree( + name="My Chat Bot", + inputs={"text": "Summarize this morning's meetings."}, + client=mock_client, + executor=ThreadPoolExecutor(), + ) From f7657d96c078ba3c748ac14863e050642af850a2 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Thu, 8 Feb 2024 18:36:59 -0800 Subject: [PATCH 07/25] Improve logging (#413) --- python/langsmith/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/langsmith/client.py b/python/langsmith/client.py index bba99f74e..67158485c 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -3306,7 +3306,6 @@ def _tracing_thread_handle_batch( try: client.batch_ingest_runs(create=create, update=update, pre_sampled=True) except Exception: - logger.error("Error in tracing thread", exc_info=True) # exceptions are logged elsewhere, but we need to make sure the # background thread continues to run pass From c6dfe3103451b6a0c7ce11898a150aa1281a1567 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Fri, 9 Feb 2024 01:37:06 -0800 Subject: [PATCH 08/25] increase timeout (#414) --- js/src/tests/batch_client.int.test.ts | 174 +++++------ js/src/tests/client.int.test.ts | 274 ++++-------------- python/tests/integration_tests/test_client.py | 192 ++++++------ 3 files changed, 240 insertions(+), 400 deletions(-) diff --git a/js/src/tests/batch_client.int.test.ts b/js/src/tests/batch_client.int.test.ts index bab6aa183..0a44dca6f 100644 --- a/js/src/tests/batch_client.int.test.ts +++ b/js/src/tests/batch_client.int.test.ts @@ -52,50 +52,16 @@ async function waitUntilRunFound( ); } -test("Test 
persist update run", async () => { - const langchainClient = new Client({ - autoBatchTracing: true, - callerOptions: { maxRetries: 0 }, - }); - const projectName = "__test_persist_update_run_batch"; - await deleteProject(langchainClient, projectName); - - const runId = uuidv4(); - const dottedOrder = convertToDottedOrderFormat( - new Date().getTime() / 1000, - runId - ); - await langchainClient.createRun({ - id: runId, - project_name: projectName, - name: "test_run", - run_type: "llm", - inputs: { text: "hello world" }, - trace_id: runId, - dotted_order: dottedOrder, - }); - - await langchainClient.updateRun(runId, { - outputs: { output: ["Hi"] }, - dotted_order: dottedOrder, - trace_id: runId, - }); - await waitUntilRunFound(langchainClient, runId, true); - const storedRun = await langchainClient.readRun(runId); - expect(storedRun.id).toEqual(runId); - await langchainClient.deleteProject({ projectName }); -}); - -test("Test persist update runs above the batch size limit", async () => { - const langchainClient = new Client({ - autoBatchTracing: true, - callerOptions: { maxRetries: 0 }, - pendingAutoBatchedRunLimit: 2, - }); - const projectName = "__test_persist_update_run_batch"; - await deleteProject(langchainClient, projectName); +test.concurrent( + "Test persist update run", + async () => { + const langchainClient = new Client({ + autoBatchTracing: true, + callerOptions: { maxRetries: 0 }, + }); + const projectName = "__test_persist_update_run_batch_1"; + await deleteProject(langchainClient, projectName); - const createRun = async () => { const runId = uuidv4(); const dottedOrder = convertToDottedOrderFormat( new Date().getTime() / 1000, @@ -115,50 +81,96 @@ test("Test persist update runs above the batch size limit", async () => { outputs: { output: ["Hi"] }, dotted_order: dottedOrder, trace_id: runId, - end_time: Math.floor(new Date().getTime() / 1000), }); await waitUntilRunFound(langchainClient, runId, true); const storedRun = await langchainClient.readRun(runId); expect(storedRun.id).toEqual(runId); - }; + await langchainClient.deleteProject({ projectName }); + }, + 180_000 +); - await Promise.all([createRun(), createRun(), createRun()]); +test.concurrent( + "Test persist update runs above the batch size limit", + async () => { + const langchainClient = new Client({ + autoBatchTracing: true, + callerOptions: { maxRetries: 0 }, + pendingAutoBatchedRunLimit: 2, + }); + const projectName = "__test_persist_update_run_batch_above_bs_limit"; + await deleteProject(langchainClient, projectName); - await langchainClient.deleteProject({ projectName }); -}); + const createRun = async () => { + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await langchainClient.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); -test("Test persist update run with delay", async () => { - const langchainClient = new Client({ - autoBatchTracing: true, - callerOptions: { maxRetries: 0 }, - }); - const projectName = "__test_persist_update_run_batch"; - await deleteProject(langchainClient, projectName); + await langchainClient.updateRun(runId, { + outputs: { output: ["Hi"] }, + dotted_order: dottedOrder, + trace_id: runId, + end_time: Math.floor(new Date().getTime() / 1000), + }); + await waitUntilRunFound(langchainClient, runId, true); + const storedRun = await langchainClient.readRun(runId); + 
expect(storedRun.id).toEqual(runId); + }; - const runId = uuidv4(); - const dottedOrder = convertToDottedOrderFormat( - new Date().getTime() / 1000, - runId - ); - await langchainClient.createRun({ - id: runId, - project_name: projectName, - name: "test_run", - run_type: "llm", - inputs: { text: "hello world" }, - trace_id: runId, - dotted_order: dottedOrder, - }); + await Promise.all([createRun(), createRun(), createRun()]); + + await langchainClient.deleteProject({ projectName }); + }, + 180_000 +); + +test.concurrent( + "Test persist update run with delay", + async () => { + const langchainClient = new Client({ + autoBatchTracing: true, + callerOptions: { maxRetries: 0 }, + }); + const projectName = "__test_persist_update_run_batch_with_delay"; + await deleteProject(langchainClient, projectName); + + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await langchainClient.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); - await new Promise((resolve) => setTimeout(resolve, 1000)); - await langchainClient.updateRun(runId, { - outputs: { output: ["Hi"] }, - dotted_order: dottedOrder, - trace_id: runId, - end_time: Math.floor(new Date().getTime() / 1000), - }); - await waitUntilRunFound(langchainClient, runId, true); - const storedRun = await langchainClient.readRun(runId); - expect(storedRun.id).toEqual(runId); - await langchainClient.deleteProject({ projectName }); -}); + await new Promise((resolve) => setTimeout(resolve, 1000)); + await langchainClient.updateRun(runId, { + outputs: { output: ["Hi"] }, + dotted_order: dottedOrder, + trace_id: runId, + end_time: Math.floor(new Date().getTime() / 1000), + }); + await waitUntilRunFound(langchainClient, runId, true); + const storedRun = await langchainClient.readRun(runId); + expect(storedRun.id).toEqual(runId); + await langchainClient.deleteProject({ projectName }); + }, + 180_000 +); diff --git a/js/src/tests/client.int.test.ts b/js/src/tests/client.int.test.ts index 257df3b08..b632674b6 100644 --- a/js/src/tests/client.int.test.ts +++ b/js/src/tests/client.int.test.ts @@ -1,9 +1,7 @@ -import { Dataset, Feedback, Run } from "../schemas.js"; +import { Dataset, Run } from "../schemas.js"; import { FunctionMessage, HumanMessage } from "langchain/schema"; -import { RunTree, RunTreeConfig } from "../run_trees.js"; import { Client } from "../client.js"; -import { StringEvaluator } from "../evaluation/string_evaluator.js"; import { v4 as uuidv4 } from "uuid"; async function toArray(iterable: AsyncIterable): Promise { @@ -69,7 +67,7 @@ async function waitUntilRunFound( return false; } }, - 60_000, + 180_000, 1_000 ); } @@ -144,214 +142,62 @@ test.concurrent("Test LangSmith Client Dataset CRD", async () => { }); test.concurrent( - "Test evaluate run", + "test create dataset", async () => { const langchainClient = new Client({ autoBatchTracing: false }); - - const projectName = "__test_evaluate_run" + Date.now(); - const datasetName = "__test_evaluate_run_dataset" + Date.now(); - await deleteProject(langchainClient, projectName); - await deleteDataset(langchainClient, datasetName); - - const dataset = await langchainClient.createDataset(datasetName); - const predicted = "abcd"; - const groundTruth = "bcde"; - const example = await langchainClient.createExample( + const datasetName = "__test_create_dataset"; + const datasets = await toArray( + 
langchainClient.listDatasets({ datasetName }) + ); + datasets.map(async (dataset: Dataset) => { + if (dataset.name === datasetName) { + await langchainClient.deleteDataset({ datasetName }); + } + }); + const dataset = await langchainClient.createDataset(datasetName, { + dataType: "llm", + }); + await langchainClient.createExample( { input: "hello world" }, - { output: groundTruth }, + { output: "hi there" }, { datasetId: dataset.id, } ); + const loadedDataset = await langchainClient.readDataset({ datasetName }); + expect(loadedDataset.data_type).toEqual("llm"); + await langchainClient.deleteDataset({ datasetName }); + }, + 180_000 +); - const parentRunConfig: RunTreeConfig = { - name: "parent_run", - run_type: "chain", - inputs: { input: "hello world" }, - project_name: projectName, - serialized: {}, - client: langchainClient, - reference_example_id: example.id, - }; - - const parentRun = new RunTree(parentRunConfig); - await parentRun.postRun(); - await parentRun.end({ output: predicted }); - await parentRun.patchRun(); - - await waitUntilRunFound( - langchainClient, - parentRun.id, - (run: Run | undefined) => Object.keys(run?.outputs || {}).length !== 0 - ); - - const run = await langchainClient.readRun(parentRun.id); - expect(run.outputs).toEqual({ output: predicted }); - const runUrl = await langchainClient.getRunUrl({ runId: run.id }); - expect(runUrl).toContain(run.id); - - function jaccardChars(output: string, answer: string): number { - const predictionChars = new Set(output.trim().toLowerCase()); - const answerChars = new Set(answer.trim().toLowerCase()); - const intersection = [...predictionChars].filter((x) => - answerChars.has(x) - ); - const union = new Set([...predictionChars, ...answerChars]); - return intersection.length / union.size; - } - - async function grader(config: { - input: string; - prediction: string; - answer?: string; - }): Promise<{ score: number; value: string }> { - let value: string; - let score: number; - if (config.answer === null || config.answer === undefined) { - value = "AMBIGUOUS"; - score = -0.5; - } else { - score = jaccardChars(config.prediction, config.answer); - value = score > 0.9 ? 
"CORRECT" : "INCORRECT"; - } - return { score: score, value: value }; - } - - const evaluator = new StringEvaluator({ - evaluationName: "Jaccard", - gradingFunction: grader, - }); - - const runs = langchainClient.listRuns({ - projectName: projectName, - executionOrder: 1, - error: false, - }); +test.concurrent( + "Test share and unshare run", + async () => { + const langchainClient = new Client({ autoBatchTracing: false }); - const project = await langchainClient.readProject({ - projectName: projectName, - }); - const projectWithStats = await langchainClient.readProject({ - projectId: project.id, + // Create a new run + const runId = uuidv4(); + await langchainClient.createRun({ + name: "Test run", + inputs: { input: "hello world" }, + run_type: "chain", + id: runId, }); - expect(projectWithStats.name).toBe(project.name); - - const allFeedback: Feedback[] = []; - for await (const run of runs) { - allFeedback.push(await langchainClient.evaluateRun(run, evaluator)); - } - expect(allFeedback.length).toEqual(1); - await waitUntil( - async () => { - try { - const feedback = await langchainClient.readFeedback( - allFeedback[0].id - ); - return feedback !== null && feedback !== undefined; - } catch (e) { - return false; - } - }, - 30_000, - 1_000 - ); + await waitUntilRunFound(langchainClient, runId); + const sharedUrl = await langchainClient.shareRun(runId); + const response = await fetch(sharedUrl); + expect(response.status).toEqual(200); + expect(await langchainClient.readRunSharedLink(runId)).toEqual(sharedUrl); - const fetchedFeedback: Feedback[] = await toArray( - langchainClient.listFeedback({ - runIds: [run.id], - feedbackKeys: ["jaccard"], - feedbackSourceTypes: ["model"], - }) - ); - expect(fetchedFeedback[0].id).toEqual(allFeedback[0].id); - expect(fetchedFeedback[0].score).toEqual( - jaccardChars(predicted, groundTruth) - ); - expect(fetchedFeedback[0].value).toEqual("INCORRECT"); - - try { - await langchainClient.deleteDataset({ datasetId: dataset.id }); - await langchainClient.deleteProject({ projectName }); - } catch (e) { - console.log(e); - } + await langchainClient.unshareRun(runId); + const sharedLink = await langchainClient.readRunSharedLink(runId); + expect(sharedLink).toBe(undefined); }, - 160_000 + 180_000 ); -test.concurrent("Test persist update run", async () => { - const langchainClient = new Client({ autoBatchTracing: false }); - const projectName = "__test_persist_update_run"; - await deleteProject(langchainClient, projectName); - - const runId = uuidv4(); - await langchainClient.createRun({ - id: runId, - project_name: projectName, - name: "test_run", - run_type: "llm", - inputs: { text: "hello world" }, - }); - - await langchainClient.updateRun(runId, { outputs: { output: ["Hi"] } }); - await waitUntilRunFound( - langchainClient, - runId, - (run: Run | undefined) => Object.keys(run?.outputs || {}).length !== 0 - ); - const storedRun = await langchainClient.readRun(runId); - expect(storedRun.id).toEqual(runId); - await langchainClient.deleteProject({ projectName }); -}); - -test.concurrent("test create dataset", async () => { - const langchainClient = new Client({ autoBatchTracing: false }); - const datasetName = "__test_create_dataset"; - const datasets = await toArray(langchainClient.listDatasets({ datasetName })); - datasets.map(async (dataset: Dataset) => { - if (dataset.name === datasetName) { - await langchainClient.deleteDataset({ datasetName }); - } - }); - const dataset = await langchainClient.createDataset(datasetName, { - dataType: "llm", - }); - await 
langchainClient.createExample( - { input: "hello world" }, - { output: "hi there" }, - { - datasetId: dataset.id, - } - ); - const loadedDataset = await langchainClient.readDataset({ datasetName }); - expect(loadedDataset.data_type).toEqual("llm"); - await langchainClient.deleteDataset({ datasetName }); -}); - -test.concurrent("Test share and unshare run", async () => { - const langchainClient = new Client({ autoBatchTracing: false }); - - // Create a new run - const runId = uuidv4(); - await langchainClient.createRun({ - name: "Test run", - inputs: { input: "hello world" }, - run_type: "chain", - id: runId, - }); - - await waitUntilRunFound(langchainClient, runId); - const sharedUrl = await langchainClient.shareRun(runId); - const response = await fetch(sharedUrl); - expect(response.status).toEqual(200); - expect(await langchainClient.readRunSharedLink(runId)).toEqual(sharedUrl); - - await langchainClient.unshareRun(runId); - const sharedLink = await langchainClient.readRunSharedLink(runId); - expect(sharedLink).toBe(undefined); -}); - test.concurrent( "Test list datasets", async () => { @@ -380,17 +226,6 @@ test.concurrent( expect(datasetsById.map((dataset) => dataset.id)).toContain(dataset1.id); expect(datasetsById.map((dataset) => dataset.id)).toContain(dataset2.id); - // List datasets by data type - const datasetsByDataTypeIterable = langchainClient.listDatasets({ - datasetName: datasetName1, - }); - const datasetsByDataType = []; - for await (const dataset of datasetsByDataTypeIterable) { - datasetsByDataType.push(dataset); - } - expect(datasetsByDataType).toHaveLength(1); - expect(datasetsByDataType[0].id).toBe(dataset1.id); - // List datasets by name const datasetsByNameIterable = langchainClient.listDatasets({ datasetName: datasetName1, @@ -402,19 +237,10 @@ test.concurrent( expect(datasetsByName).toHaveLength(1); expect(datasetsByName.map((dataset) => dataset.id)).toContain(dataset1.id); - // Delete datasets await langchainClient.deleteDataset({ datasetId: dataset1.id }); await langchainClient.deleteDataset({ datasetId: dataset2.id }); - const remainingDatasetsIterable = langchainClient.listDatasets({ - datasetIds: [dataset1.id, dataset2.id], - }); - const remainingDatasets = []; - for await (const dataset of remainingDatasetsIterable) { - remainingDatasets.push(dataset); - } - expect(remainingDatasets).toHaveLength(0); }, - 90_000 + 180_000 ); test.concurrent( @@ -453,7 +279,7 @@ test.concurrent( feedbackSourceType: "app", }); }, - 90_000 + 180_000 ); test.concurrent( @@ -505,7 +331,7 @@ test.concurrent( expect(run2.outputs).toBeDefined(); expect(Object.keys(run2.outputs ?? 
{})).toHaveLength(0); }, - 90_000 + 180_000 ); test.concurrent( @@ -563,7 +389,7 @@ test.concurrent( expect(run2.extra?.metadata.LANGCHAIN_OTHER_KEY).toBeUndefined(); expect(run2.extra?.metadata).not.toHaveProperty("LANGCHAIN_API_KEY"); }, - 90_000 + 180_000 ); describe("createChatExample", () => { @@ -617,7 +443,7 @@ describe("createChatExample", () => { // Delete dataset await langchainClient.deleteDataset({ datasetId: dataset.id }); - }, 90_000); + }, 180_000); }); test.concurrent( @@ -644,7 +470,7 @@ test.concurrent( }); expect(result).toContain(runId); }, - 90_000 + 180_000 ); test.concurrent( @@ -689,5 +515,5 @@ test.concurrent( expect(examplesList2.length).toEqual(3); await client.deleteDataset({ datasetId: dataset.id }); }, - 90_000 + 180_000 ); diff --git a/python/tests/integration_tests/test_client.py b/python/tests/integration_tests/test_client.py index e2196d4f0..bf8428613 100644 --- a/python/tests/integration_tests/test_client.py +++ b/python/tests/integration_tests/test_client.py @@ -1,6 +1,5 @@ """LangSmith langchain_client Integration Tests.""" -import functools import io import os import random @@ -14,24 +13,24 @@ from freezegun import freeze_time from langchain.schema import FunctionMessage, HumanMessage -from langsmith.client import Client +from langsmith.client import ID_TYPE, Client from langsmith.schemas import DataType -from langsmith.utils import ( - LangSmithConnectionError, - LangSmithError, -) +from langsmith.utils import LangSmithConnectionError, LangSmithError def wait_for( - condition: Callable[[], bool], max_attempts: int = 20, sleep_time: int = 3 + condition: Callable[[], bool], max_sleep_time: int = 120, sleep_time: int = 3 ): - for _ in range(max_attempts): + """Wait for a condition to be true.""" + start_time = time.time() + while time.time() - start_time < max_sleep_time: try: if condition(): return except Exception: time.sleep(sleep_time) - raise ValueError("Callable did not return in time") + total_time = time.time() - start_time + raise ValueError(f"Callable did not return within {total_time}") @pytest.fixture @@ -136,43 +135,36 @@ def test_datasets(langchain_client: Client) -> None: langchain_client.delete_dataset(dataset_id=dataset_id) -@freeze_time("2023-01-01") +@pytest.mark.skip(reason="This test is flaky") def test_persist_update_run(langchain_client: Client) -> None: """Test the persist and update methods work as expected.""" - project_name = "__test_persist_update_run" - if project_name in [sess.name for sess in langchain_client.list_projects()]: + project_name = "__test_persist_update_run" + uuid4().hex[:4] + if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) - start_time = datetime.now() - revision_id = uuid4() - run: dict = dict( - id=uuid4(), - name="test_run", - run_type="llm", - inputs={"text": "hello world"}, - project_name=project_name, - api_url=os.getenv("LANGCHAIN_ENDPOINT"), - start_time=start_time, - extra={"extra": "extra"}, - revision_id=revision_id, - ) - langchain_client.create_run(**run) - run["outputs"] = {"output": ["Hi"]} - run["extra"]["foo"] = "bar" - langchain_client.update_run(run["id"], **run) try: - for _ in range(10): - try: - stored_run = langchain_client.read_run(run["id"]) - if stored_run.end_time is not None: - break - except LangSmithError: - time.sleep(3) - + start_time = datetime.now() + revision_id = uuid4() + run: dict = dict( + id=uuid4(), + name="test_run", + run_type="llm", + inputs={"text": "hello world"}, + project_name=project_name, + 
api_url=os.getenv("LANGCHAIN_ENDPOINT"), + start_time=start_time, + extra={"extra": "extra"}, + revision_id=revision_id, + ) + langchain_client.create_run(**run) + run["outputs"] = {"output": ["Hi"]} + run["extra"]["foo"] = "bar" + langchain_client.update_run(run["id"], **run) + wait_for(lambda: langchain_client.read_run(run["id"]).end_time is not None) + stored_run = langchain_client.read_run(run["id"]) assert stored_run.id == run["id"] assert stored_run.outputs == run["outputs"] assert stored_run.start_time == run["start_time"] - assert stored_run.extra - assert stored_run.extra["metadata"]["revision_id"] == str(revision_id) + assert stored_run.revision_id == str(revision_id) finally: langchain_client.delete_project(project_name=project_name) @@ -193,15 +185,14 @@ def test_create_project( ) -> None: """Test the project creation""" monkeypatch.setenv("LANGCHAIN_ENDPOINT", "https://api.smith.langchain.com") + project_name = "__test_create_project" + uuid4().hex[:4] + if langchain_client.has_project(project_name): + langchain_client.delete_project(project_name=project_name) try: - langchain_client.read_project(project_name="__test_create_project") - langchain_client.delete_project(project_name="__test_create_project") - except LangSmithError: - pass - project_name = "__test_create_project" - project = langchain_client.create_project(project_name=project_name) - assert project.name == project_name - langchain_client.delete_project(project_id=project.id) + project = langchain_client.create_project(project_name=project_name) + assert project.name == project_name + finally: + langchain_client.delete_project(project_name=project_name) @freeze_time("2023-01-01") @@ -211,7 +202,7 @@ def test_create_dataset( """Test persisting runs and adding feedback.""" monkeypatch.setenv("LANGCHAIN_ENDPOINT", "https://api.smith.langchain.com") dataset_name = "__test_create_dataset" - if dataset_name in [dataset.name for dataset in langchain_client.list_datasets()]: + if langchain_client.has_dataset(dataset_name=dataset_name): langchain_client.delete_dataset(dataset_name=dataset_name) dataset = langchain_client.create_dataset(dataset_name, data_type=DataType.llm) ground_truth = "bcde" @@ -227,11 +218,10 @@ def test_create_dataset( @freeze_time("2023-01-01") def test_list_datasets(langchain_client: Client) -> None: - for name in ["___TEST dataset1", "___TEST dataset2"]: - datasets = list(langchain_client.list_datasets(dataset_name=name)) - if datasets: - for dataset in datasets: - langchain_client.delete_dataset(dataset_id=dataset.id) + if langchain_client.has_dataset(dataset_name="___TEST dataset1"): + langchain_client.delete_dataset(dataset_name="___TEST dataset1") + if langchain_client.has_dataset(dataset_name="___TEST dataset2"): + langchain_client.delete_dataset(dataset_name="___TEST dataset2") dataset1 = langchain_client.create_dataset( "___TEST dataset1", data_type=DataType.llm ) @@ -264,48 +254,59 @@ def test_list_datasets(langchain_client: Client) -> None: langchain_client.delete_dataset(dataset_id=dataset2.id) -@freeze_time("2023-01-01") +@pytest.mark.skip(reason="This test is flaky") def test_create_run_with_masked_inputs_outputs( langchain_client: Client, monkeypatch: pytest.MonkeyPatch ) -> None: - project_name = "__test_create_run_with_masked_inputs_outputs" + project_name = "__test_create_run_with_masked_inputs_outputs" + uuid4().hex[:4] monkeypatch.setenv("LANGCHAIN_HIDE_INPUTS", "true") monkeypatch.setenv("LANGCHAIN_HIDE_OUTPUTS", "true") - for project in langchain_client.list_projects(): - if 
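As an aside for readers following the test refactor above: the rewritten integration tests lean on a cleanup-then-poll idiom (guard with `has_project`, delete stale projects, then poll until the backend has persisted the run). A rough, self-contained sketch of that idiom follows; the project name, run payload, and timeouts are illustrative and assume a configured LangSmith API key, not part of the patch itself.

```python
import time
from uuid import uuid4

from langsmith import Client

client = Client()
project_name = "__demo_project_" + uuid4().hex[:4]  # hypothetical name

# Guard-and-delete so reruns start from a clean slate, as the tests above do.
if client.has_project(project_name):
    client.delete_project(project_name=project_name)

run_id = uuid4()
client.create_run(
    id=run_id,
    name="demo_run",
    run_type="llm",
    inputs={"text": "hello world"},
    project_name=project_name,
)
client.update_run(run_id, outputs={"output": "hi"})

# Poll for persistence instead of sleeping a fixed amount, like wait_for() above.
deadline = time.time() + 120
while time.time() < deadline:
    try:
        if client.read_run(run_id).end_time is not None:
            break
    except Exception:
        pass
    time.sleep(3)

client.delete_project(project_name=project_name)
```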
project.name == project_name: - langchain_client.delete_project(project_name=project_name) - - run_id = uuid4() - langchain_client.create_run( - id=run_id, - project_name=project_name, - name="test_run", - run_type="llm", - inputs={"prompt": "hello world"}, - outputs={"generation": "hi there"}, - start_time=datetime.utcnow(), - end_time=datetime.utcnow(), - hide_inputs=True, - hide_outputs=True, - ) + if langchain_client.has_project(project_name): + langchain_client.delete_project(project_name=project_name) + try: + run_id = uuid4() + langchain_client.create_run( + id=run_id, + project_name=project_name, + name="test_run", + run_type="llm", + inputs={"prompt": "hello world"}, + outputs={"generation": "hi there"}, + start_time=datetime.utcnow(), + end_time=datetime.utcnow(), + hide_inputs=True, + hide_outputs=True, + ) - run_id2 = uuid4() - langchain_client.create_run( - id=run_id2, - project_name=project_name, - name="test_run_2", - run_type="llm", - inputs={"messages": "hello world 2"}, - start_time=datetime.utcnow(), - hide_inputs=True, - ) + run_id2 = uuid4() + langchain_client.create_run( + id=run_id2, + project_name=project_name, + name="test_run_2", + run_type="llm", + inputs={"messages": "hello world 2"}, + start_time=datetime.utcnow(), + hide_inputs=True, + ) - langchain_client.update_run( - run_id2, - outputs={"generation": "hi there 2"}, - end_time=datetime.utcnow(), - hide_outputs=True, - ) + langchain_client.update_run( + run_id2, + outputs={"generation": "hi there 2"}, + end_time=datetime.utcnow(), + hide_outputs=True, + ) + wait_for(lambda: langchain_client.read_run(run_id).end_time is not None) + stored_run = langchain_client.read_run(run_id) + assert "hello" not in str(stored_run.inputs) + assert stored_run.outputs is not None + assert "hi" not in str(stored_run.outputs) + wait_for(lambda: langchain_client.read_run(run_id2).end_time is not None) + stored_run2 = langchain_client.read_run(run_id2) + assert "hello" not in str(stored_run2.inputs) + assert stored_run2.outputs is not None + assert "hi" not in str(stored_run2.outputs) + finally: + langchain_client.delete_project(project_name=project_name) @freeze_time("2023-01-01") @@ -443,6 +444,7 @@ def test_get_info() -> None: assert info.batch_ingest_config["size_limit"] > 0 # type: ignore +@pytest.mark.skip(reason="This test is flaky") @pytest.mark.parametrize("add_metadata", [True, False]) @pytest.mark.parametrize("do_batching", [True, False]) def test_update_run_extra(add_metadata: bool, do_batching: bool) -> None: @@ -469,16 +471,16 @@ def test_update_run_extra(add_metadata: bool, do_batching: bool) -> None: revision_id = uuid4() langchain_client.create_run(**run, revision_id=revision_id) # type: ignore - def _get_run(has_end: bool = False) -> bool: + def _get_run(run_id: ID_TYPE, has_end: bool = False) -> bool: try: - r = langchain_client.read_run(run["id"]) # type: ignore + r = langchain_client.read_run(run_id) # type: ignore if has_end: return r.end_time is not None return True except LangSmithError: return False - wait_for(_get_run) + wait_for(lambda: _get_run(run_id)) created_run = langchain_client.read_run(run_id) assert created_run.metadata["foo"] == "bar" assert created_run.metadata["revision_id"] == str(revision_id) @@ -487,12 +489,12 @@ def _get_run(has_end: bool = False) -> bool: run["extra"]["metadata"]["foo2"] = "baz" # type: ignore run["tags"] = ["tag3"] langchain_client.update_run(run_id, **run) # type: ignore - wait_for(functools.partial(_get_run, has_end=True)) + wait_for(lambda: _get_run(run_id, 
has_end=True)) updated_run = langchain_client.read_run(run_id) - assert updated_run.metadata["foo"] == "bar", updated_run.metadata # type: ignore - assert updated_run.revision_id == str(revision_id), updated_run.metadata + assert updated_run.metadata["foo"] == "bar" # type: ignore + assert updated_run.revision_id == str(revision_id) if add_metadata: - assert updated_run.metadata["foo2"] == "baz", updated_run.metadata # type: ignore + updated_run.metadata["foo2"] == "baz" # type: ignore assert updated_run.tags == ["tag3"] else: assert updated_run.tags == ["tag1", "tag2"] From d2ee8b530c3e0f0996083ed524396cf68aa3c031 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Fri, 9 Feb 2024 08:07:10 -0800 Subject: [PATCH 09/25] rm 429 (#416) --- python/langsmith/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/langsmith/client.py b/python/langsmith/client.py index 67158485c..fd56a6ec7 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -132,7 +132,7 @@ def _default_retry_config() -> Retry: """ retry_params = dict( total=3, - status_forcelist=[502, 503, 504, 408, 425, 429], + status_forcelist=[502, 503, 504, 408, 425], backoff_factor=0.5, # Sadly urllib3 1.x doesn't support backoff_jitter raise_on_redirect=False, From 4d6ccae99bc3771ca294d56e49592cd035d7b06b Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Fri, 9 Feb 2024 08:08:53 -0800 Subject: [PATCH 10/25] Bump (#417) --- python/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/pyproject.toml b/python/pyproject.toml index 60e5f0634..dced0ec3e 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langsmith" -version = "0.0.87" +version = "0.0.88" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." authors = ["LangChain "] license = "MIT" From 8c3c4c9c39e5ab74f3b5060aceeccdbd9766a9b8 Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Fri, 9 Feb 2024 12:40:03 -0800 Subject: [PATCH 11/25] Fix start_time getting overwrriten when using batch endpoint (#421) --- python/langsmith/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/langsmith/client.py b/python/langsmith/client.py index fd56a6ec7..56afb5115 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -840,7 +840,7 @@ def upload_csv( @staticmethod def _run_transform( - run: Union[ls_schemas.Run, dict, ls_schemas.RunLikeDict], + run: Union[ls_schemas.Run, dict, ls_schemas.RunLikeDict], update: bool = False ) -> dict: """ Transforms the given run object into a dictionary representation. 
@@ -863,7 +863,7 @@ def _run_transform( run_create["inputs"] = _hide_inputs(run_create["inputs"]) if "outputs" in run_create: run_create["outputs"] = _hide_outputs(run_create["outputs"]) - if not run_create.get("start_time"): + if not update and not run_create.get("start_time"): run_create["start_time"] = datetime.datetime.utcnow() return run_create @@ -1024,7 +1024,7 @@ def batch_ingest_runs( return # transform and convert to dicts create_dicts = [self._run_transform(run) for run in create or []] - update_dicts = [self._run_transform(run) for run in update or []] + update_dicts = [self._run_transform(run, update=True) for run in update or []] # combine post and patch dicts where possible if update_dicts and create_dicts: create_by_id = {run["id"]: run for run in create_dicts} From 5bed6af4fbbe6bf400df49bcbcedf88f44452ca0 Mon Sep 17 00:00:00 2001 From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com> Date: Fri, 9 Feb 2024 12:55:44 -0800 Subject: [PATCH 12/25] update --- python/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/pyproject.toml b/python/pyproject.toml index dced0ec3e..8bed494e1 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langsmith" -version = "0.0.88" +version = "0.0.89" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." authors = ["LangChain "] license = "MIT" From c5742d83fbce61406a20d81108aee263c1d05eff Mon Sep 17 00:00:00 2001 From: Jacob Lee Date: Fri, 9 Feb 2024 16:28:09 -0800 Subject: [PATCH 13/25] Adds batch tracing support check for JS (#411) @hinthornw @nfcampos I think this is better than doing a check on startup? --- js/src/client.ts | 35 +++++++++++++++++ js/src/tests/batch_client.test.ts | 65 +++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+) diff --git a/js/src/client.ts b/js/src/client.ts index b650642c0..9770dc607 100644 --- a/js/src/client.ts +++ b/js/src/client.ts @@ -242,6 +242,8 @@ export class Client { private autoBatchTracing = false; + private batchEndpointSupported?: boolean; + private pendingAutoBatchedRuns: AutoBatchQueueItem[] = []; private pendingAutoBatchedRunLimit = 100; @@ -537,6 +539,21 @@ export class Client { } } + protected async batchEndpointIsSupported() { + const response = await fetch(`${this.apiUrl}/info`, { + method: "GET", + headers: { Accept: "application/json" }, + signal: AbortSignal.timeout(this.timeout_ms), + }); + if (!response.ok) { + // consume the response body to release the connection + // https://undici.nodejs.org/#/?id=garbage-collection + await response.text(); + return false; + } + return true; + } + public async createRun(run: CreateRunParams): Promise { if (!this._filterForSampling([run]).length) { return; @@ -632,6 +649,24 @@ export class Client { preparedCreateParams = await mergeRuntimeEnvIntoRunCreates( preparedCreateParams ); + if (this.batchEndpointSupported === undefined) { + this.batchEndpointSupported = await this.batchEndpointIsSupported(); + } + if (!this.batchEndpointSupported) { + this.autoBatchTracing = false; + for (const preparedCreateParam of body.post) { + await this.createRun(preparedCreateParam as CreateRunParams); + } + for (const preparedUpdateParam of body.patch) { + if (preparedUpdateParam.id !== undefined) { + await this.updateRun( + preparedUpdateParam.id, + preparedUpdateParam as UpdateRunParams + ); + } + } + return; + } const headers = { ...this.headers, "Content-Type": "application/json", diff --git 
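A compact sketch of the behaviour the new `update` flag above encodes: only create (POST) payloads get a default `start_time`, so batched PATCH payloads can no longer overwrite the value recorded when the run started. The function and field names below are illustrative stand-ins, not the client's internals.

```python
import datetime


def prepare_payload(run: dict, update: bool = False) -> dict:
    """Copy a run dict, defaulting start_time only for create (POST) payloads."""
    payload = dict(run)
    if not update and not payload.get("start_time"):
        payload["start_time"] = datetime.datetime.now(datetime.timezone.utc)
    return payload


create_payload = prepare_payload({"id": "123", "name": "demo"})
patch_payload = prepare_payload({"id": "123", "outputs": {"ok": True}}, update=True)
assert "start_time" in create_payload
assert "start_time" not in patch_payload  # the batched PATCH no longer overwrites it
```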
a/js/src/tests/batch_client.test.ts b/js/src/tests/batch_client.test.ts index 36dab304f..9b7a10568 100644 --- a/js/src/tests/batch_client.test.ts +++ b/js/src/tests/batch_client.test.ts @@ -15,6 +15,9 @@ describe("Batch client tracing", () => { ok: true, text: () => "", }); + jest + .spyOn(client as any, "batchEndpointIsSupported") + .mockResolvedValue(true); const projectName = "__test_batch"; const runId = uuidv4(); @@ -68,6 +71,9 @@ describe("Batch client tracing", () => { ok: true, text: () => "", }); + jest + .spyOn(client as any, "batchEndpointIsSupported") + .mockResolvedValue(true); const projectName = "__test_batch"; const runId = uuidv4(); @@ -134,6 +140,9 @@ describe("Batch client tracing", () => { ok: true, text: () => "", }); + jest + .spyOn(client as any, "batchEndpointIsSupported") + .mockResolvedValue(true); const projectName = "__test_batch"; const runId = uuidv4(); @@ -235,6 +244,9 @@ describe("Batch client tracing", () => { ok: true, text: () => "", }); + jest + .spyOn(client as any, "batchEndpointIsSupported") + .mockResolvedValue(true); const projectName = "__test_batch"; const runIds = await Promise.all( @@ -296,4 +308,57 @@ describe("Batch client tracing", () => { patch: [], }); }); + + it("If batching is unsupported, fall back to old endpoint", async () => { + const client = new Client({ + apiKey: "test-api-key", + autoBatchTracing: true, + }); + const callSpy = jest + .spyOn((client as any).caller, "call") + .mockResolvedValue({ + ok: true, + text: () => "", + }); + jest + .spyOn(client as any, "batchEndpointIsSupported") + .mockResolvedValue(false); + const projectName = "__test_batch"; + + const runId = uuidv4(); + const dottedOrder = convertToDottedOrderFormat( + new Date().getTime() / 1000, + runId + ); + await client.createRun({ + id: runId, + project_name: projectName, + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); + + await new Promise((resolve) => setTimeout(resolve, 300)); + + const calledRequestParam: any = callSpy.mock.calls[0][2]; + expect(JSON.parse(calledRequestParam?.body)).toMatchObject({ + id: runId, + session_name: projectName, + extra: expect.anything(), + start_time: expect.any(Number), + name: "test_run", + run_type: "llm", + inputs: { text: "hello world" }, + trace_id: runId, + dotted_order: dottedOrder, + }); + + expect(callSpy).toHaveBeenCalledWith( + fetch, + "https://api.smith.langchain.com/runs", + expect.objectContaining({ body: expect.any(String) }) + ); + }); }); From 3c5685f1b440fee64966ba2639d6762c5c3f2810 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Fri, 9 Feb 2024 16:53:18 -0800 Subject: [PATCH 14/25] Add dotted order and trace id to trace tree JS (#420) --- js/src/run_trees.ts | 9 +++++-- js/src/tests/batch_client.int.test.ts | 34 ++++++++++++++++++++++++++- 2 files changed, 40 insertions(+), 3 deletions(-) diff --git a/js/src/run_trees.ts b/js/src/run_trees.ts index 50a4b3e8a..6b98ae661 100644 --- a/js/src/run_trees.ts +++ b/js/src/run_trees.ts @@ -25,7 +25,7 @@ export function convertToDottedOrderFormat(epoch: number, runId: string) { export interface RunTreeConfig { name: string; - run_type: string; + run_type?: string; id?: string; project_name?: string; parent_run?: RunTree; @@ -44,7 +44,7 @@ export interface RunTreeConfig { export class RunTree implements BaseRun { id: string; name: RunTreeConfig["name"]; - run_type: RunTreeConfig["run_type"]; + run_type: string; project_name: string; parent_run?: 
RunTree; child_runs: RunTree[]; @@ -87,6 +87,7 @@ export class RunTree implements BaseRun { private static getDefaultConfig(): object { return { id: uuid.v4(), + run_type: "chain", project_name: getEnvironmentVariable("LANGCHAIN_PROJECT") ?? getEnvironmentVariable("LANGCHAIN_SESSION") ?? // TODO: Deprecate @@ -168,6 +169,8 @@ export class RunTree implements BaseRun { session_name: run.project_name, child_runs: child_runs, parent_run_id: parent_run_id, + trace_id: run.trace_id, + dotted_order: run.dotted_order, }; return persistedRun; } @@ -195,6 +198,8 @@ export class RunTree implements BaseRun { reference_example_id: this.reference_example_id, extra: this.extra, events: this.events, + dotted_order: this.dotted_order, + trace_id: this.trace_id, }; await this.client.updateRun(this.id, runUpdate); diff --git a/js/src/tests/batch_client.int.test.ts b/js/src/tests/batch_client.int.test.ts index 0a44dca6f..35589dbb5 100644 --- a/js/src/tests/batch_client.int.test.ts +++ b/js/src/tests/batch_client.int.test.ts @@ -1,5 +1,5 @@ import { Client } from "../client.js"; -import { convertToDottedOrderFormat } from "../run_trees.js"; +import { RunTree, convertToDottedOrderFormat } from "../run_trees.js"; import { v4 as uuidv4 } from "uuid"; async function deleteProject(langchainClient: Client, projectName: string) { @@ -174,3 +174,35 @@ test.concurrent( }, 180_000 ); + +test.concurrent( + "Test persist update run tree", + async () => { + const langchainClient = new Client({ + autoBatchTracing: true, + callerOptions: { maxRetries: 0 }, + }); + const projectName = "__test_persist_update_run_tree"; + await deleteProject(langchainClient, projectName); + const runId = uuidv4(); + const runTree = new RunTree({ + name: "Test Run Tree", + id: runId, + inputs: { input: "foo1" }, + client: langchainClient, + project_name: projectName, + }); + await runTree.postRun(); + await runTree.end({ output: "foo2" }); + await runTree.patchRun(); + await waitUntilRunFound(langchainClient, runId, true); + const storedRun = await langchainClient.readRun(runId); + expect(storedRun.id).toEqual(runId); + expect(storedRun.dotted_order).toEqual(runTree.dotted_order); + expect(storedRun.trace_id).toEqual(runTree.trace_id); + expect(storedRun.inputs).toEqual({ input: "foo1" }); + expect(storedRun.outputs).toEqual({ output: "foo2" }); + await langchainClient.deleteProject({ projectName }); + }, + 180_000 +); From be5c741b46eabac4d648742ed249789a86815dab Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Fri, 9 Feb 2024 16:57:36 -0800 Subject: [PATCH 15/25] Filter retries (#419) --- README.md | 2 ++ python/README.md | 1 + python/langsmith/client.py | 28 +++++++++++---- python/langsmith/utils.py | 71 +++++++++++++++++++++++++++++++++++--- 4 files changed, 91 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index d8a7dc02c..6e4e58ab5 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ To get started with the Python SDK, [install the package](https://pypi.org/proje ```bash pip install -U langsmith +export LANGCHAIN_TRACING_V2=true export LANGCHAIN_API_KEY=ls_... ``` @@ -35,6 +36,7 @@ To get started with the JavaScript / TypeScript SDK, [install the package](https ```bash yarn add langsmith +export LANGCHAIN_TRACING_V2=true export LANGCHAIN_API_KEY=ls_... 
``` diff --git a/python/README.md b/python/README.md index aa5c22fa5..c4fe2ec6f 100644 --- a/python/README.md +++ b/python/README.md @@ -6,6 +6,7 @@ To install: ```bash pip install -U langsmith +export LANGCHAIN_TRACING_V2=true export LANGCHAIN_API_KEY=ls_... ``` diff --git a/python/langsmith/client.py b/python/langsmith/client.py index 56afb5115..15ac6f2ce 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -53,8 +53,7 @@ import pandas as pd logger = logging.getLogger(__name__) -# Filter the Connection pool is full warnings from urllib3 -logging.getLogger("urllib3.connectionpool").addFilter(ls_utils.FilterPoolFullWarning()) +_urllib3_logger = logging.getLogger("urllib3.connectionpool") def _is_localhost(url: str) -> bool: @@ -149,7 +148,7 @@ def _default_retry_config() -> Retry: # Retry on all methods retry_params["allowed_methods"] = None - return Retry(**retry_params) # type: ignore + return ls_utils.LangSmithRetry(**retry_params) # type: ignore _PRIMITIVE_TYPES = (str, int, float, bool) @@ -310,6 +309,13 @@ def _as_uuid(value: ID_TYPE, var: Optional[str] = None) -> uuid.UUID: ) from e +@functools.lru_cache(maxsize=1) +def _parse_url(url): + parsed_url = urllib_parse.urlparse(url) + host = parsed_url.netloc.split(":")[0] + return host + + @dataclass(order=True) class TracingQueueItem: priority: str @@ -426,6 +432,10 @@ def __repr__(self) -> str: """ return f"Client (API URL: {self.api_url})" + @property + def _host(self) -> str: + return _parse_url(self.api_url) + @property def _host_url(self) -> str: """The web host url.""" @@ -535,7 +545,10 @@ def request_with_retries( LangSmithError If the request fails. """ - + logging_filters = [ + ls_utils.FilterLangSmithRetry(), + ls_utils.FilterPoolFullWarning(host=str(self._host)), + ] retry_on_: Tuple[Type[BaseException], ...] 
= ( *(retry_on or []), *(ls_utils.LangSmithConnectionError, ls_utils.LangSmithAPIError), @@ -545,9 +558,10 @@ def request_with_retries( for idx in range(stop_after_attempt): try: try: - response = self.session.request( - request_method, url, stream=False, **request_kwargs - ) + with ls_utils.filter_logs(_urllib3_logger, logging_filters): + response = self.session.request( + request_method, url, stream=False, **request_kwargs + ) ls_utils.raise_for_status_with_text(response) return response except requests.HTTPError as e: diff --git a/python/langsmith/utils.py b/python/langsmith/utils.py index abf7edb4c..8cffd488e 100644 --- a/python/langsmith/utils.py +++ b/python/langsmith/utils.py @@ -1,13 +1,27 @@ """Generic utility functions.""" +import contextlib import enum import functools import logging import os import subprocess -from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, Union +import threading +from typing import ( + Any, + Callable, + Dict, + Generator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) import requests +from urllib3.util import Retry from langsmith import schemas as ls_schemas @@ -280,8 +294,57 @@ def get_tracer_project(return_default_value=True) -> Optional[str]: class FilterPoolFullWarning(logging.Filter): """Filter urrllib3 warnings logged when the connection pool isn't reused.""" + def __init__(self, name: str = "", host: str = "") -> None: + super().__init__(name) + self._host = host + def filter(self, record) -> bool: """urllib3.connectionpool:Connection pool is full, discarding connection: ...""" - return ( - "Connection pool is full, discarding connection" not in record.getMessage() - ) + msg = record.getMessage() + if "Connection pool is full, discarding connection" not in msg: + return True + return self._host not in msg + + +class FilterLangSmithRetry(logging.Filter): + """Filter for retries from this lib.""" + + def filter(self, record) -> bool: + """Filter retries from this library.""" + # We re-raise/log manually. + msg = record.getMessage() + return "LangSmithRetry" not in msg + + +class LangSmithRetry(Retry): + """Wrapper to filter logs with this name.""" + + +_FILTER_LOCK = threading.RLock() + + +@contextlib.contextmanager +def filter_logs( + logger: logging.Logger, filters: Sequence[logging.Filter] +) -> Generator[None, None, None]: + """ + Temporarily adds specified filters to a logger. + + Parameters: + - logger: The logger to which the filters will be added. + - filters: A sequence of logging.Filter objects to be temporarily added + to the logger. 
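The `filter_logs` helper being introduced in this patch (its body follows below) is easiest to read through a usage sketch: attach noise filters around a chatty call, then restore the logger afterwards. The `quiet` context manager and `DropRetryNoise` filter here are self-contained illustrative equivalents, not the library's own names.

```python
import contextlib
import logging
from typing import Generator, Sequence


@contextlib.contextmanager
def quiet(
    logger: logging.Logger, filters: Sequence[logging.Filter]
) -> Generator[None, None, None]:
    """Same idea as filter_logs: the filters apply only inside the block."""
    for f in filters:
        logger.addFilter(f)
    try:
        yield
    finally:
        for f in filters:
            logger.removeFilter(f)


class DropRetryNoise(logging.Filter):
    def filter(self, record: logging.LogRecord) -> bool:
        return "Retrying" not in record.getMessage()


pool_logger = logging.getLogger("urllib3.connectionpool")
with quiet(pool_logger, [DropRetryNoise()]):
    pool_logger.warning("Retrying request ...")  # suppressed inside the block
pool_logger.warning("Retrying request ...")      # visible again afterwards
```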
+ """ + with _FILTER_LOCK: + for filter in filters: + logger.addFilter(filter) + # Not actually perfectly thread-safe, but it's only log filters + try: + yield + finally: + with _FILTER_LOCK: + for filter in filters: + try: + logger.removeFilter(filter) + except BaseException: + _LOGGER.warning("Failed to remove filter") From 4846756b2e93de13745b71c1f0271c7c9bcfdd3f Mon Sep 17 00:00:00 2001 From: Savvas Mantzouranidis Date: Sat, 10 Feb 2024 01:01:12 +0000 Subject: [PATCH 16/25] fix depracation warning of .utcnow() (#418) as seen in the [official docs](https://docs.python.org/3/library/datetime.html#datetime.datetime.utcnow) datetime.datetime.utcnow() is being deprecated and the suggested function to use instead is datetime.datetime.now(datetime.UTC) (which also supports the .isoformat() function) --------- Co-authored-by: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com> --- python/langsmith/client.py | 4 ++-- python/langsmith/run_trees.py | 8 +++---- python/langsmith/schemas.py | 12 +++++----- python/tests/integration_tests/test_client.py | 23 +++++++++++-------- 4 files changed, 26 insertions(+), 21 deletions(-) diff --git a/python/langsmith/client.py b/python/langsmith/client.py index 15ac6f2ce..f77d6373b 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -878,7 +878,7 @@ def _run_transform( if "outputs" in run_create: run_create["outputs"] = _hide_outputs(run_create["outputs"]) if not update and not run_create.get("start_time"): - run_create["start_time"] = datetime.datetime.utcnow() + run_create["start_time"] = datetime.datetime.now(datetime.timezone.utc) return run_create @staticmethod @@ -1146,7 +1146,7 @@ def update_run( if end_time is not None: data["end_time"] = end_time.isoformat() else: - data["end_time"] = datetime.datetime.utcnow().isoformat() + data["end_time"] = datetime.datetime.now(datetime.timezone.utc).isoformat() if error is not None: data["error"] = error if inputs is not None: diff --git a/python/langsmith/run_trees.py b/python/langsmith/run_trees.py index 3769e98ac..7f10a820a 100644 --- a/python/langsmith/run_trees.py +++ b/python/langsmith/run_trees.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Dict, List, Optional, cast from uuid import UUID, uuid4 @@ -29,7 +29,7 @@ class RunTree(RunBase): name: str id: UUID = Field(default_factory=uuid4) run_type: str = Field(default="chain") - start_time: datetime = Field(default_factory=datetime.utcnow) + start_time: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) parent_run: Optional[RunTree] = Field(default=None, exclude=True) child_runs: List[RunTree] = Field( default_factory=list, @@ -102,7 +102,7 @@ def end( end_time: Optional[datetime] = None, ) -> None: """Set the end time of the run and all child runs.""" - self.end_time = end_time or datetime.utcnow() + self.end_time = end_time or datetime.now(timezone.utc) if outputs is not None: self.outputs = outputs if error is not None: @@ -135,7 +135,7 @@ def create_child( error=error, run_type=run_type, reference_example_id=reference_example_id, - start_time=start_time or datetime.utcnow(), + start_time=start_time or datetime.now(timezone.utc), end_time=end_time, extra=extra or {}, parent_run=self, diff --git a/python/langsmith/schemas.py b/python/langsmith/schemas.py index de130eb9f..ef269f9b6 100644 --- a/python/langsmith/schemas.py +++ b/python/langsmith/schemas.py @@ -2,7 +2,7 @@ from __future__ import 
annotations -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from enum import Enum from typing import ( Any, @@ -57,7 +57,7 @@ class ExampleCreate(ExampleBase): """Example create model.""" id: Optional[UUID] - created_at: datetime = Field(default_factory=datetime.utcnow) + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) class Example(ExampleBase): @@ -127,7 +127,7 @@ class DatasetCreate(DatasetBase): """Dataset create model.""" id: Optional[UUID] = None - created_at: datetime = Field(default_factory=datetime.utcnow) + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) class Dataset(DatasetBase): @@ -422,7 +422,7 @@ class TracerSession(BaseModel): id: UUID """The ID of the project.""" - start_time: datetime = Field(default_factory=datetime.utcnow) + start_time: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) """The time the project was created.""" end_time: Optional[datetime] = None """The time the project was ended.""" @@ -514,8 +514,8 @@ class AnnotationQueue(BaseModel): id: UUID name: str description: Optional[str] = None - created_at: datetime = Field(default_factory=datetime.utcnow) - updated_at: datetime = Field(default_factory=datetime.utcnow) + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) tenant_id: UUID diff --git a/python/tests/integration_tests/test_client.py b/python/tests/integration_tests/test_client.py index bf8428613..1f096efaf 100644 --- a/python/tests/integration_tests/test_client.py +++ b/python/tests/integration_tests/test_client.py @@ -1,11 +1,12 @@ """LangSmith langchain_client Integration Tests.""" +import datetime import io import os import random import string import time -from datetime import datetime, timedelta +from datetime import timedelta from typing import Any, Callable, Dict, cast from uuid import uuid4 @@ -142,7 +143,7 @@ def test_persist_update_run(langchain_client: Client) -> None: if langchain_client.has_project(project_name): langchain_client.delete_project(project_name=project_name) try: - start_time = datetime.now() + start_time = datetime.datetime.now() revision_id = uuid4() run: dict = dict( id=uuid4(), @@ -272,8 +273,8 @@ def test_create_run_with_masked_inputs_outputs( run_type="llm", inputs={"prompt": "hello world"}, outputs={"generation": "hi there"}, - start_time=datetime.utcnow(), - end_time=datetime.utcnow(), + start_time=datetime.datetime.now(datetime.timezone.utc), + end_time=datetime.datetime.now(datetime.timezone.utc), hide_inputs=True, hide_outputs=True, ) @@ -285,14 +286,14 @@ def test_create_run_with_masked_inputs_outputs( name="test_run_2", run_type="llm", inputs={"messages": "hello world 2"}, - start_time=datetime.utcnow(), + start_time=datetime.datetime.now(datetime.timezone.utc), hide_inputs=True, ) langchain_client.update_run( run_id2, outputs={"generation": "hi there 2"}, - end_time=datetime.utcnow(), + end_time=datetime.datetime.now(datetime.timezone.utc), hide_outputs=True, ) wait_for(lambda: langchain_client.read_run(run_id).end_time is not None) @@ -364,8 +365,12 @@ def test_batch_ingest_runs(langchain_client: Client) -> None: _session = "__test_batch_ingest_runs" trace_id = uuid4() run_id_2 = uuid4() - current_time = datetime.utcnow().strftime("%Y%m%dT%H%M%S%fZ") - later_time = (datetime.utcnow() + timedelta(seconds=1)).strftime("%Y%m%dT%H%M%S%fZ") + current_time = 
datetime.datetime.now(datetime.timezone.utc).strftime( + "%Y%m%dT%H%M%S%fZ" + ) + later_time = ( + datetime.datetime.now(datetime.timezone.utc) + timedelta(seconds=1) + ).strftime("%Y%m%dT%H%M%S%fZ") runs_to_create = [ { "id": str(trace_id), @@ -453,7 +458,7 @@ def test_update_run_extra(add_metadata: bool, do_batching: bool) -> None: run: Dict[str, Any] = { "id": run_id, "name": "run 1", - "start_time": datetime.utcnow(), + "start_time": datetime.datetime.now(datetime.timezone.utc), "run_type": "chain", "inputs": {"input1": 1, "input2": 2}, "outputs": {"output1": 3, "output2": 4}, From b214f3a5018ff686f5ddd4645ded217634424c1b Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Fri, 9 Feb 2024 17:23:18 -0800 Subject: [PATCH 17/25] Bump version (#430) JS Prerelease Python release --- js/package.json | 2 +- js/src/index.ts | 2 +- python/pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/js/package.json b/js/package.json index ad72d9095..1990f198f 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "langsmith", - "version": "0.0.68", + "version": "0.0.69-rc0", "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.", "packageManager": "yarn@1.22.19", "files": [ diff --git a/js/src/index.ts b/js/src/index.ts index 79b99b853..ed87a38c6 100644 --- a/js/src/index.ts +++ b/js/src/index.ts @@ -5,4 +5,4 @@ export { Dataset, Example, TracerSession, Run, Feedback } from "./schemas.js"; export { RunTree, RunTreeConfig } from "./run_trees.js"; // Update using yarn bump-version -export const __version__ = "0.0.68"; +export const __version__ = "0.0.69-rc0"; diff --git a/python/pyproject.toml b/python/pyproject.toml index 8bed494e1..c14eb922f 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langsmith" -version = "0.0.89" +version = "0.0.90" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
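For context on the `utcnow()` migration above: the replacement produces timezone-aware datetimes, while the deprecated call returned naive ones. A quick comparison (on Python 3.11+ `datetime.UTC` is an alias for `timezone.utc`):

```python
from datetime import datetime, timezone

naive = datetime.utcnow()            # deprecated: no tzinfo attached
aware = datetime.now(timezone.utc)   # preferred: explicit UTC tzinfo

print(naive.tzinfo)       # None
print(aware.tzinfo)       # UTC
print(aware.isoformat())  # e.g. 2024-02-10T01:01:12.345678+00:00
```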
authors = ["LangChain "] license = "MIT" From 6ff1369f7c226e79782fbe51106d67fbe466218f Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Sat, 10 Feb 2024 18:38:24 -0800 Subject: [PATCH 18/25] =?UTF-8?q?OpenAI=20Wrapper:=20Don't=20ignore=20func?= =?UTF-8?q?tion/tool=20calls=20in=20openai=20streaming=20=E2=80=A6=20(#431?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/langsmith/wrappers/_openai.py | 77 ++++++++++++++++++++++++---- 1 file changed, 68 insertions(+), 9 deletions(-) diff --git a/python/langsmith/wrappers/_openai.py b/python/langsmith/wrappers/_openai.py index e8966ff9c..8a119b2a4 100644 --- a/python/langsmith/wrappers/_openai.py +++ b/python/langsmith/wrappers/_openai.py @@ -1,7 +1,8 @@ from __future__ import annotations import functools -from typing import TYPE_CHECKING, Callable, List, TypeVar, Union +from collections import defaultdict +from typing import TYPE_CHECKING, Any, Callable, DefaultDict, Dict, List, TypeVar, Union from langsmith import run_helpers @@ -9,25 +10,83 @@ from openai import AsyncOpenAI, OpenAI from openai.types.chat.chat_completion_chunk import ( ChatCompletionChunk, + Choice, + ChoiceDeltaToolCall, ) from openai.types.completion import Completion C = TypeVar("C", bound=Union["OpenAI", "AsyncOpenAI"]) +def _reduce_choices(choices: List[Choice]) -> dict: + reversed_choices = list(reversed(choices)) + message: Dict[str, Any] = { + "role": "assistant", + "content": "", + } + for c in reversed_choices: + if c.delta.role: + message["role"] = c.delta.role + break + tool_calls: DefaultDict[int, List[ChoiceDeltaToolCall]] = defaultdict(list) + for c in choices: + if c.delta.content: + message["content"] += c.delta.content + if c.delta.function_call: + if not message.get("function_call"): + message["function_call"] = {"name": "", "arguments": ""} + if c.delta.function_call.name: + message["function_call"]["name"] += c.delta.function_call.name + if c.delta.function_call.arguments: + message["function_call"]["arguments"] += c.delta.function_call.arguments + if c.delta.tool_calls: + for tool_call in c.delta.tool_calls: + tool_calls[c.index].append(tool_call) + if tool_calls: + message["tool_calls"] = [None for _ in tool_calls.keys()] + for index, tool_call_chunks in tool_calls.items(): + message["tool_calls"][index] = { + "index": index, + "id": next((c.id for c in tool_call_chunks if c.id), None), + "type": next((c.type for c in tool_call_chunks if c.type), None), + } + for chunk in tool_call_chunks: + if chunk.function: + if not message["tool_calls"][index].get("function"): + message["tool_calls"][index]["function"] = { + "name": "", + "arguments": "", + } + if chunk.function.name: + message["tool_calls"][index]["function"][ + "name" + ] += chunk.function.name + if chunk.function.arguments: + message["tool_calls"][index]["function"][ + "arguments" + ] += chunk.function.arguments + return { + "index": choices[0].index, + "finish_reason": next( + (c.finish_reason for c in reversed_choices if c.finish_reason), + None, + ), + "message": message, + } + + def _reduce_chat(all_chunks: List[ChatCompletionChunk]) -> dict: - all_content = [] + choices_by_index: DefaultDict[int, List[Choice]] = defaultdict(list) for chunk in all_chunks: - content = chunk.choices[0].delta.content - if content is not None: - all_content.append(content) - content = "".join(all_content) + for choice in chunk.choices: + choices_by_index[choice.index].append(choice) if all_chunks: d = all_chunks[-1].model_dump() - d["choices"] = 
[{"message": {"role": "assistant", "content": content}}] + d["choices"] = [ + _reduce_choices(choices) for choices in choices_by_index.values() + ] else: - d = {"choices": [{"message": {"role": "assistant", "content": content}}]} - + d = {"choices": [{"message": {"role": "assistant", "content": ""}}]} return d From 5502cbf2334bf7610087b12f84ef2611bd3c2266 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Tue, 13 Feb 2024 12:26:45 -0800 Subject: [PATCH 19/25] Add first token time for oai wrapper (#435) --- js/package.json | 2 +- python/langsmith/run_helpers.py | 34 ++++++++++++++++++++++++++++----- python/langsmith/run_trees.py | 8 ++++++-- 3 files changed, 36 insertions(+), 8 deletions(-) diff --git a/js/package.json b/js/package.json index 1990f198f..91028fc28 100644 --- a/js/package.json +++ b/js/package.json @@ -122,4 +122,4 @@ }, "./package.json": "./package.json" } -} \ No newline at end of file +} diff --git a/python/langsmith/run_helpers.py b/python/langsmith/run_helpers.py index e7c34d836..c369e44b4 100644 --- a/python/langsmith/run_helpers.py +++ b/python/langsmith/run_helpers.py @@ -4,6 +4,7 @@ import contextlib import contextvars +import datetime import functools import inspect import logging @@ -117,6 +118,7 @@ def _container_end( container: _TraceableContainer, outputs: Optional[Any] = None, error: Optional[str] = None, + events: Optional[List[dict]] = None, ): """End the run.""" run_tree = container.get("new_run") @@ -124,7 +126,7 @@ def _container_end( # Tracing disabled return outputs_ = outputs if isinstance(outputs, dict) else {"output": outputs} - run_tree.end(outputs=outputs_, error=error) + run_tree.end(outputs=outputs_, error=error, events=events) run_tree.patch() @@ -350,6 +352,7 @@ async def async_wrapper( async def async_generator_wrapper( *args: Any, langsmith_extra: Optional[LangSmithExtra] = None, **kwargs: Any ) -> AsyncGenerator: + events: List[dict] = [] context_run = _PARENT_RUN_TREE.get() run_container = _setup_run( func, @@ -385,11 +388,21 @@ async def async_generator_wrapper( if inspect.iscoroutine(async_gen_result): async_gen_result = await async_gen_result async for item in async_gen_result: + if run_type == "llm": + events.append( + { + "name": "new_token", + "time": datetime.datetime.now( + datetime.timezone.utc + ).isoformat(), + "kwargs": {"token": item}, + }, + ) results.append(item) yield item except BaseException as e: stacktrace = traceback.format_exc() - _container_end(run_container, error=stacktrace) + _container_end(run_container, error=stacktrace, events=events) raise e finally: _PARENT_RUN_TREE.set(context_run) @@ -407,7 +420,7 @@ async def async_generator_wrapper( function_result = results else: function_result = None - _container_end(run_container, outputs=function_result) + _container_end(run_container, outputs=function_result, events=events) @functools.wraps(func) def wrapper( @@ -456,6 +469,7 @@ def generator_wrapper( *args: Any, langsmith_extra: Optional[LangSmithExtra] = None, **kwargs: Any ) -> Any: context_run = _PARENT_RUN_TREE.get() + events: List[dict] = [] run_container = _setup_run( func, run_type=run_type, @@ -483,6 +497,16 @@ def generator_wrapper( # around this. 
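The `_reduce_choices` logic above is easier to see with toy data: streamed deltas arrive as fragments, and content plus tool-call argument pieces are concatenated per tool-call index. The chunk dicts below are simplified stand-ins for the real OpenAI typed objects, so this is a sketch of the aggregation idea rather than the wrapper itself.

```python
from collections import defaultdict
from typing import Dict, List

# Simplified stand-ins for streamed choice deltas.
chunks: List[Dict] = [
    {"index": 0, "delta": {"role": "assistant", "content": ""}},
    {"index": 0, "delta": {"tool_calls": [{"index": 0, "function": {"name": "get_weather", "arguments": ""}}]}},
    {"index": 0, "delta": {"tool_calls": [{"index": 0, "function": {"arguments": '{"city": "Par'}}]}},
    {"index": 0, "delta": {"tool_calls": [{"index": 0, "function": {"arguments": 'is"}'}}]}},
]

message = {
    "role": "assistant",
    "content": "",
    "tool_calls": defaultdict(lambda: {"name": "", "arguments": ""}),
}
for chunk in chunks:
    delta = chunk["delta"]
    message["content"] += delta.get("content") or ""
    for tc in delta.get("tool_calls", []):
        slot = message["tool_calls"][tc["index"]]
        slot["name"] += tc["function"].get("name", "")
        slot["arguments"] += tc["function"].get("arguments", "")

print(dict(message["tool_calls"]))
# {0: {'name': 'get_weather', 'arguments': '{"city": "Paris"}'}}
```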
generator_result = func(*args, **kwargs) for item in generator_result: + if run_type == "llm": + events.append( + { + "name": "new_token", + "time": datetime.datetime.now( + datetime.timezone.utc + ).isoformat(), + "kwargs": {"token": item}, + }, + ) results.append(item) try: yield item @@ -490,7 +514,7 @@ def generator_wrapper( break except BaseException as e: stacktrace = traceback.format_exc() - _container_end(run_container, error=stacktrace) + _container_end(run_container, error=stacktrace, events=events) raise e finally: _PARENT_RUN_TREE.set(context_run) @@ -508,7 +532,7 @@ def generator_wrapper( function_result = results else: function_result = None - _container_end(run_container, outputs=function_result) + _container_end(run_container, outputs=function_result, events=events) if inspect.isasyncgenfunction(func): selected_wrapper: Callable = async_generator_wrapper diff --git a/python/langsmith/run_trees.py b/python/langsmith/run_trees.py index 7f10a820a..5d4e91277 100644 --- a/python/langsmith/run_trees.py +++ b/python/langsmith/run_trees.py @@ -73,8 +73,6 @@ def infer_defaults(cls, values: dict) -> dict: values["trace_id"] = values["parent_run"].trace_id else: values["trace_id"] = values["id"] - else: - print(values["trace_id"]) cast(dict, values.setdefault("extra", {})) return values @@ -100,6 +98,7 @@ def end( outputs: Optional[Dict] = None, error: Optional[str] = None, end_time: Optional[datetime] = None, + events: Optional[List[Dict]] = None, ) -> None: """Set the end time of the run and all child runs.""" self.end_time = end_time or datetime.now(timezone.utc) @@ -107,6 +106,8 @@ def end( self.outputs = outputs if error is not None: self.error = error + if events is not None: + self.events = events def create_child( self, @@ -181,6 +182,9 @@ def patch(self) -> None: end_time=self.end_time, dotted_order=self.dotted_order, trace_id=self.trace_id, + events=self.events, + tags=self.tags, + extra=self.extra, ) def wait(self) -> None: From 878ba0785d229aa19477c7d8913ea78bac8edda1 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Tue, 13 Feb 2024 14:27:37 -0800 Subject: [PATCH 20/25] Wfh/release (#436) --- js/package.json | 4 ++-- js/src/index.ts | 2 +- python/pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/js/package.json b/js/package.json index 91028fc28..1f7f7674f 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "langsmith", - "version": "0.0.69-rc0", + "version": "0.0.69", "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.", "packageManager": "yarn@1.22.19", "files": [ @@ -122,4 +122,4 @@ }, "./package.json": "./package.json" } -} +} \ No newline at end of file diff --git a/js/src/index.ts b/js/src/index.ts index ed87a38c6..0a9df5dae 100644 --- a/js/src/index.ts +++ b/js/src/index.ts @@ -5,4 +5,4 @@ export { Dataset, Example, TracerSession, Run, Feedback } from "./schemas.js"; export { RunTree, RunTreeConfig } from "./run_trees.js"; // Update using yarn bump-version -export const __version__ = "0.0.69-rc0"; +export const __version__ = "0.0.69"; diff --git a/python/pyproject.toml b/python/pyproject.toml index c14eb922f..9a9c77260 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langsmith" -version = "0.0.90" +version = "0.0.91" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
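A sketch of the event records the generator wrappers above now accumulate for LLM runs, which is what makes time-to-first-token measurable later. The token stream below is a stand-in for a real streamed response; only the event shape mirrors the patch.

```python
import datetime
from typing import Iterable, List


def record_token_events(token_stream: Iterable[str]) -> List[dict]:
    """Collect one timestamped 'new_token' event per streamed token."""
    events: List[dict] = []
    for token in token_stream:
        events.append(
            {
                "name": "new_token",
                "time": datetime.datetime.now(datetime.timezone.utc).isoformat(),
                "kwargs": {"token": token},
            }
        )
    return events


events = record_token_events(["Hello", ",", " world"])
first_token_time = events[0]["time"]  # usable for time-to-first-token metrics
print(first_token_time, len(events))
```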
authors = ["LangChain "] license = "MIT" From dc8e7db9eedcd8e235ded18fbc6c2dee1be76ccb Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Wed, 14 Feb 2024 13:19:35 -0800 Subject: [PATCH 21/25] Add multi project query support (OR) (#439) --- js/package.json | 4 +- js/src/client.ts | 71 ++++- js/src/tests/batch_client.int.test.ts | 4 + js/src/tests/client.int.test.ts | 2 +- js/src/tests/run_trees.int.test.ts | 89 ++++++ js/src/utils/async_caller.ts | 4 +- js/yarn.lock | 308 ++------------------ python/langsmith/client.py | 24 +- python/tests/integration_tests/test_runs.py | 29 ++ 9 files changed, 237 insertions(+), 298 deletions(-) diff --git a/js/package.json b/js/package.json index 1f7f7674f..da4695470 100644 --- a/js/package.json +++ b/js/package.json @@ -62,6 +62,7 @@ "devDependencies": { "@babel/preset-env": "^7.22.4", "@jest/globals": "^29.5.0", + "@langchain/core": "^0.1.28", "@tsconfig/recommended": "^1.0.2", "@types/jest": "^29.5.1", "@typescript-eslint/eslint-plugin": "^5.59.8", @@ -75,7 +76,6 @@ "eslint-plugin-no-instanceof": "^1.0.1", "eslint-plugin-prettier": "^4.2.1", "jest": "^29.5.0", - "langchain": "^0.0.147", "prettier": "^2.8.8", "ts-jest": "^29.1.0", "ts-node": "^10.9.1", @@ -122,4 +122,4 @@ }, "./package.json": "./package.json" } -} \ No newline at end of file +} diff --git a/js/src/client.ts b/js/src/client.ts index 9770dc607..dcf0fbb7f 100644 --- a/js/src/client.ts +++ b/js/src/client.ts @@ -45,8 +45,8 @@ interface ClientConfig { } interface ListRunsParams { - projectId?: string; - projectName?: string; + projectId?: string | string[]; + projectName?: string | string[]; executionOrder?: number; parentRunId?: string; referenceExampleId?: string; @@ -818,15 +818,23 @@ export class Client { filter, limit, }: ListRunsParams): AsyncIterable { - let projectId_ = projectId; + let projectIds: string[] = []; + if (projectId) { + projectIds = Array.isArray(projectId) ? projectId : [projectId]; + } if (projectName) { - if (projectId) { - throw new Error("Only one of projectId or projectName may be given"); - } - projectId_ = (await this.readProject({ projectName })).id; + const projectNames = Array.isArray(projectName) + ? projectName + : [projectName]; + const projectIds_ = await Promise.all( + projectNames.map((name) => + this.readProject({ projectName: name }).then((project) => project.id) + ) + ); + projectIds.push(...projectIds_); } const body = { - session: projectId_ ? [projectId_] : null, + session: projectIds.length ? 
projectIds : null, run_type: runType, reference_example: referenceExampleId, query, @@ -1110,6 +1118,53 @@ export class Client { return result as TracerSession; } + public async hasProject({ + projectId, + projectName, + }: { + projectId?: string; + projectName?: string; + }): Promise { + // TODO: Add a head request + let path = "/sessions"; + const params = new URLSearchParams(); + if (projectId !== undefined && projectName !== undefined) { + throw new Error("Must provide either projectName or projectId, not both"); + } else if (projectId !== undefined) { + assertUuid(projectId); + path += `/${projectId}`; + } else if (projectName !== undefined) { + params.append("name", projectName); + } else { + throw new Error("Must provide projectName or projectId"); + } + const response = await this.caller.call( + fetch, + `${this.apiUrl}${path}?${params}`, + { + method: "GET", + headers: this.headers, + signal: AbortSignal.timeout(this.timeout_ms), + } + ); + // consume the response body to release the connection + // https://undici.nodejs.org/#/?id=garbage-collection + try { + const result = await response.json(); + if (!response.ok) { + return false; + } + // If it's OK and we're querying by name, need to check the list is not empty + if (Array.isArray(result)) { + return result.length > 0; + } + // projectId querying + return true; + } catch (e) { + return false; + } + } + public async readProject({ projectId, projectName, diff --git a/js/src/tests/batch_client.int.test.ts b/js/src/tests/batch_client.int.test.ts index 35589dbb5..51a86a6e9 100644 --- a/js/src/tests/batch_client.int.test.ts +++ b/js/src/tests/batch_client.int.test.ts @@ -58,6 +58,7 @@ test.concurrent( const langchainClient = new Client({ autoBatchTracing: true, callerOptions: { maxRetries: 0 }, + timeout_ms: 30_000, }); const projectName = "__test_persist_update_run_batch_1"; await deleteProject(langchainClient, projectName); @@ -97,6 +98,7 @@ test.concurrent( autoBatchTracing: true, callerOptions: { maxRetries: 0 }, pendingAutoBatchedRunLimit: 2, + timeout_ms: 30_000, }); const projectName = "__test_persist_update_run_batch_above_bs_limit"; await deleteProject(langchainClient, projectName); @@ -141,6 +143,7 @@ test.concurrent( const langchainClient = new Client({ autoBatchTracing: true, callerOptions: { maxRetries: 0 }, + timeout_ms: 30_000, }); const projectName = "__test_persist_update_run_batch_with_delay"; await deleteProject(langchainClient, projectName); @@ -181,6 +184,7 @@ test.concurrent( const langchainClient = new Client({ autoBatchTracing: true, callerOptions: { maxRetries: 0 }, + timeout_ms: 30_000, }); const projectName = "__test_persist_update_run_tree"; await deleteProject(langchainClient, projectName); diff --git a/js/src/tests/client.int.test.ts b/js/src/tests/client.int.test.ts index b632674b6..c52b54d54 100644 --- a/js/src/tests/client.int.test.ts +++ b/js/src/tests/client.int.test.ts @@ -1,5 +1,5 @@ import { Dataset, Run } from "../schemas.js"; -import { FunctionMessage, HumanMessage } from "langchain/schema"; +import { FunctionMessage, HumanMessage } from "@langchain/core/messages"; import { Client } from "../client.js"; import { v4 as uuidv4 } from "uuid"; diff --git a/js/src/tests/run_trees.int.test.ts b/js/src/tests/run_trees.int.test.ts index 110f3c15d..af9572802 100644 --- a/js/src/tests/run_trees.int.test.ts +++ b/js/src/tests/run_trees.int.test.ts @@ -28,6 +28,25 @@ async function waitUntil( throw new Error("Timeout"); } +async function pollRunsUntilCount( + client: Client, + projectName: string, + 
count: number +): Promise { + await waitUntil( + async () => { + try { + const runs = await toArray(client.listRuns({ projectName })); + return runs.length === count; + } catch (e) { + return false; + } + }, + 120_000, // Wait up to 120 seconds + 3000 // every 3 second + ); +} + test.concurrent( "Test post and patch run", async () => { @@ -130,3 +149,73 @@ test.concurrent( }, 120_000 ); + +test.concurrent( + "Test list runs multi project", + async () => { + const projectNames = [ + "__My JS Tracer Project - test_list_runs_multi_project", + "__My JS Tracer Project - test_list_runs_multi_project2", + ]; + + try { + const langchainClient = new Client({ timeout_ms: 30000 }); + + for (const project of projectNames) { + if (await langchainClient.hasProject({ projectName: project })) { + await langchainClient.deleteProject({ projectName: project }); + } + } + + const parentRunConfig: RunTreeConfig = { + name: "parent_run", + inputs: { text: "hello world" }, + project_name: projectNames[0], + client: langchainClient, + }; + + const parent_run = new RunTree(parentRunConfig); + await parent_run.postRun(); + await parent_run.end({ output: "Completed: foo" }); + await parent_run.patchRun(); + + const parentRunConfig2: RunTreeConfig = { + name: "parent_run", + inputs: { text: "hello world" }, + project_name: projectNames[1], + client: langchainClient, + }; + + const parent_run2 = new RunTree(parentRunConfig2); + await parent_run2.postRun(); + await parent_run2.end({ output: "Completed: foo" }); + await parent_run2.patchRun(); + await pollRunsUntilCount(langchainClient, projectNames[0], 1); + await pollRunsUntilCount(langchainClient, projectNames[1], 1); + + const runsIter = langchainClient.listRuns({ + projectName: projectNames, + }); + const runs = await toArray(runsIter); + + expect(runs.length).toBe(2); + expect( + runs.every((run) => run?.outputs?.["output"] === "Completed: foo") + ).toBe(true); + expect(runs[0].session_id).not.toBe(runs[1].session_id); + } finally { + const langchainClient = new Client(); + + for (const project of projectNames) { + if (await langchainClient.hasProject({ projectName: project })) { + try { + await langchainClient.deleteProject({ projectName: project }); + } catch (e) { + console.debug(e); + } + } + } + } + }, + 120_000 +); diff --git a/js/src/utils/async_caller.ts b/js/src/utils/async_caller.ts index f944c4d47..731909891 100644 --- a/js/src/utils/async_caller.ts +++ b/js/src/utils/async_caller.ts @@ -100,10 +100,10 @@ export class AsyncCaller { } } }, - retries: this.maxRetries, - randomize: true, // If needed we can change some of the defaults here, // but they're quite sensible. 
+ retries: this.maxRetries, + randomize: true, } ), { throwOnTimeout: true } diff --git a/js/yarn.lock b/js/yarn.lock index ba8acc011..4456625d0 100644 --- a/js/yarn.lock +++ b/js/yarn.lock @@ -10,20 +10,6 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" -"@anthropic-ai/sdk@^0.6.2": - version "0.6.2" - resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.6.2.tgz#4be415e6b1d948df6f8e03af84aedf102ec74b70" - integrity sha512-fB9PUj9RFT+XjkL+E9Ol864ZIJi+1P8WnbHspN3N3/GK2uSzjd0cbVIKTGgf4v3N8MwaQu+UWnU7C4BG/fap/g== - dependencies: - "@types/node" "^18.11.18" - "@types/node-fetch" "^2.6.4" - abort-controller "^3.0.0" - agentkeepalive "^4.2.1" - digest-fetch "^1.3.0" - form-data-encoder "1.7.2" - formdata-node "^4.3.2" - node-fetch "^2.6.7" - "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.21.4": version "7.21.4" resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.21.4.tgz" @@ -1395,6 +1381,23 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@langchain/core@^0.1.28": + version "0.1.28" + resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.1.28.tgz#07831383687bb157fc41253fad70274a15fd142d" + integrity sha512-8f4VUCO2cIoGQGLc9SiiGyb4JsIAVKs1b/qtOj1WvxD3Z8W93lYFsuSEpFdyppkimx8N+MpDdWG+Nd/9RJ3Xyg== + dependencies: + ansi-styles "^5.0.0" + camelcase "6" + decamelize "1.2.0" + js-tiktoken "^1.0.8" + langsmith "~0.0.48" + ml-distance "^4.0.0" + p-queue "^6.6.2" + p-retry "4" + uuid "^9.0.0" + zod "^3.22.4" + zod-to-json-schema "^3.22.3" + "@nodelib/fs.scandir@2.1.5": version "2.1.5" resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" @@ -1537,24 +1540,11 @@ resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz" integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== -"@types/node-fetch@^2.6.4": - version "2.6.4" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.4.tgz#1bc3a26de814f6bf466b25aeb1473fa1afe6a660" - integrity sha512-1ZX9fcN4Rvkvgv4E6PAY5WXUFWFcRWxZa3EW83UjycOB9ljJCedb2CupIP4RZMEwF/M3eTcCihbBRgwtGbg5Rg== - dependencies: - "@types/node" "*" - form-data "^3.0.0" - "@types/node@*": version "20.2.5" resolved "https://registry.npmjs.org/@types/node/-/node-20.2.5.tgz" integrity sha512-JJulVEQXmiY9Px5axXHeYGLSjhkZEnD+MDPDGbCbIAbMslkKwmygtZFy1X6s/075Yo94sf8GuSlFfPzysQrWZQ== -"@types/node@^18.11.18": - version "18.17.15" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.17.15.tgz#31301a273b9ca7d568fe6d1c35ae52e0fb3f8d6a" - integrity sha512-2yrWpBk32tvV/JAd3HNHWuZn/VDN1P+72hWirHnvsvTGSqbANi+kSeuQR9yAHnbvaBvHDsoTdXV0Fe+iRtHLKA== - "@types/prettier@^2.1.5": version "2.7.2" resolved "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.2.tgz" @@ -1676,13 +1666,6 @@ "@typescript-eslint/types" "5.59.8" eslint-visitor-keys "^3.3.0" -abort-controller@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" - integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== - dependencies: - event-target-shim "^5.0.0" - acorn-jsx@^5.3.2: version "5.3.2" resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" @@ -1698,13 +1681,6 @@ acorn@^8.4.1, acorn@^8.8.0: resolved "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz" integrity 
sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== -agentkeepalive@^4.2.1: - version "4.5.0" - resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923" - integrity sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew== - dependencies: - humanize-ms "^1.2.1" - ajv@^6.10.0, ajv@^6.12.4: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" @@ -1815,11 +1791,6 @@ array.prototype.flatmap@^1.3.1: es-abstract "^1.20.4" es-shim-unscopables "^1.0.0" -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - available-typed-arrays@^1.0.5: version "1.0.5" resolved "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz" @@ -1914,21 +1885,11 @@ balanced-match@^1.0.0: resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base-64@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/base-64/-/base-64-0.1.0.tgz#780a99c84e7d600260361511c4877613bf24f6bb" - integrity sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA== - base64-js@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== -binary-extensions@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== - binary-search@^1.3.5: version "1.3.6" resolved "https://registry.yarnpkg.com/binary-search/-/binary-search-1.3.6.tgz#e32426016a0c5092f0f3598836a1c7da3560565c" @@ -2028,11 +1989,6 @@ char-regex@^1.0.2: resolved "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz" integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== -charenc@0.0.2: - version "0.0.2" - resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667" - integrity sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA== - ci-info@^3.2.0: version "3.8.0" resolved "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz" @@ -2086,13 +2042,6 @@ color-name@~1.1.4: resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -combined-stream@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - commander@^10.0.1: version "10.0.1" resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06" @@ -2141,11 +2090,6 @@ cross-spawn@^7.0.1, cross-spawn@^7.0.2, 
cross-spawn@^7.0.3: shebang-command "^2.0.0" which "^2.0.1" -crypt@0.0.2: - version "0.0.2" - resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b" - integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow== - debug@^3.2.7: version "3.2.7" resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" @@ -2160,7 +2104,7 @@ debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: dependencies: ms "2.1.2" -decamelize@^1.2.0: +decamelize@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== @@ -2188,11 +2132,6 @@ define-properties@^1.1.3, define-properties@^1.1.4, define-properties@^1.2.0: has-property-descriptors "^1.0.0" object-keys "^1.1.1" -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - detect-newline@^3.0.0: version "3.1.0" resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz" @@ -2208,14 +2147,6 @@ diff@^4.0.1: resolved "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== -digest-fetch@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/digest-fetch/-/digest-fetch-1.3.0.tgz#898e69264d00012a23cf26e8a3e40320143fc661" - integrity sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA== - dependencies: - base-64 "^0.1.0" - md5 "^2.3.0" - dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" @@ -2512,11 +2443,6 @@ esutils@^2.0.2: resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -event-target-shim@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" - integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== - eventemitter3@^4.0.4: version "4.0.7" resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz" @@ -2553,11 +2479,6 @@ expect@^29.0.0, expect@^29.5.0: jest-message-util "^29.5.0" jest-util "^29.5.0" -expr-eval@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/expr-eval/-/expr-eval-2.0.2.tgz#fa6f044a7b0c93fde830954eb9c5b0f7fbc7e201" - integrity sha512-4EMSHGOPSwAfBiibw3ndnP0AvjDWLsMvGOvWEZ2F96IGk0bIVdjQisOHxReSkE13mHcfbuCiXw+G4y0zv6N8Eg== - fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" @@ -2641,11 +2562,6 @@ flat-cache@^3.0.4: flatted "^3.1.0" rimraf "^3.0.2" -flat@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" - integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== - flatted@^3.1.0: version "3.2.7" resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz" @@ -2658,28 +2574,6 @@ for-each@^0.3.3: dependencies: 
is-callable "^1.1.3" -form-data-encoder@1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040" - integrity sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A== - -form-data@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - -formdata-node@^4.3.2: - version "4.4.1" - resolved "https://registry.yarnpkg.com/formdata-node/-/formdata-node-4.4.1.tgz#23f6a5cb9cb55315912cbec4ff7b0f59bbd191e2" - integrity sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ== - dependencies: - node-domexception "1.0.0" - web-streams-polyfill "4.0.0-beta.3" - fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" @@ -2883,13 +2777,6 @@ human-signals@^2.1.0: resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz" integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== -humanize-ms@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" - integrity sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ== - dependencies: - ms "^2.0.0" - ignore@^5.2.0: version "5.2.4" resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz" @@ -2972,11 +2859,6 @@ is-boolean-object@^1.1.0: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-buffer@~1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: version "1.2.7" resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz" @@ -3500,10 +3382,10 @@ jest@^29.5.0: import-local "^3.0.2" jest-cli "^29.5.0" -js-tiktoken@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/js-tiktoken/-/js-tiktoken-1.0.7.tgz#56933fcd2093e8304060dfde3071bda91812e6f5" - integrity sha512-biba8u/clw7iesNEWLOLwrNGoBP2lA+hTaBLs/D45pJdUPFXyxD6nhcDVtADChghv4GgyAiMKYMiRx7x6h7Biw== +js-tiktoken@^1.0.8: + version "1.0.10" + resolved "https://registry.yarnpkg.com/js-tiktoken/-/js-tiktoken-1.0.10.tgz#2b343ec169399dcee8f9ef9807dbd4fafd3b30dc" + integrity sha512-ZoSxbGjvGyMT13x6ACo9ebhDha/0FHdKA+OsQcMOWcm1Zs7r90Rhk5lhERLzji+3rA7EKpXCgwXcM5fF3DMpdA== dependencies: base64-js "^1.5.1" @@ -3564,53 +3446,15 @@ json5@^2.2.2, json5@^2.2.3: resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== -jsonpointer@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" - integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== - kleur@^3.0.3: version "3.0.3" resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz" integrity 
sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== -langchain@^0.0.147: - version "0.0.147" - resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.0.147.tgz#5ba4cd94be0c5e661f90f2715b8ea57fec08e89c" - integrity sha512-4PjOQMKd2VfUPsGuOUA/Dbi5WfvX0aE1Oqex2lwsP9l1d7Xibf3GAYHiWwFG+wTwgrsnHhVyTD3FvpjA/T1TGw== - dependencies: - "@anthropic-ai/sdk" "^0.6.2" - ansi-styles "^5.0.0" - binary-extensions "^2.2.0" - camelcase "6" - decamelize "^1.2.0" - expr-eval "^2.0.2" - flat "^5.0.2" - js-tiktoken "^1.0.7" - js-yaml "^4.1.0" - jsonpointer "^5.0.1" - langchainhub "~0.0.6" - langsmith "~0.0.31" - ml-distance "^4.0.0" - object-hash "^3.0.0" - openai "~4.4.0" - openapi-types "^12.1.3" - p-queue "^6.6.2" - p-retry "4" - uuid "^9.0.0" - yaml "^2.2.1" - zod "^3.21.4" - zod-to-json-schema "^3.20.4" - -langchainhub@~0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/langchainhub/-/langchainhub-0.0.6.tgz#9d2d06e4ce0807b4e8a31e19611f57aef990b54d" - integrity sha512-SW6105T+YP1cTe0yMf//7kyshCgvCTyFBMTgH2H3s9rTAR4e+78DA/BBrUL/Mt4Q5eMWui7iGuAYb3pgGsdQ9w== - -langsmith@~0.0.31: - version "0.0.35" - resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.0.35.tgz#5ccb388b671ad94660292f4d99f823642831d4d8" - integrity sha512-3EItwg4fPKE8xWl7TxbnjQgxiHT5WoffsvJYh06ml/otI7zrLoY1cZzdJJTN4bkiCRnPdd3uvV6UVjtZN9MMgA== +langsmith@~0.0.48: + version "0.0.68" + resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.0.68.tgz#8748d3203d348cc19e5ee4ddeef908964a62e21a" + integrity sha512-bxaJndEhUFDfv5soWKxONrLMZaVZfS+G4smJl3WYQlsEph8ierG3QbJfx1PEwl40TD0aFBjzq62usUX1UOCjuA== dependencies: "@types/uuid" "^9.0.1" commander "^10.0.1" @@ -3698,15 +3542,6 @@ makeerror@1.0.12: dependencies: tmpl "1.0.5" -md5@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f" - integrity sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g== - dependencies: - charenc "0.0.2" - crypt "0.0.2" - is-buffer "~1.1.6" - merge-stream@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz" @@ -3725,18 +3560,6 @@ micromatch@^4.0.4: braces "^3.0.2" picomatch "^2.3.1" -mime-db@1.52.0: - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12: - version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" @@ -3795,11 +3618,6 @@ ms@2.1.2, ms@^2.1.1: resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@^2.0.0: - version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - natural-compare-lite@^1.4.0: version "1.4.0" resolved "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz" @@ -3810,18 +3628,6 @@ 
natural-compare@^1.4.0: resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== -node-domexception@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" - integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== - -node-fetch@^2.6.7: - version "2.7.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" - integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== - dependencies: - whatwg-url "^5.0.0" - node-int64@^0.4.0: version "0.4.0" resolved "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz" @@ -3849,11 +3655,6 @@ num-sort@^2.0.0: resolved "https://registry.yarnpkg.com/num-sort/-/num-sort-2.1.0.tgz#1cbb37aed071329fdf41151258bc011898577a9b" integrity sha512-1MQz1Ed8z2yckoBeSfkQHHO9K1yDRxxtotKSJ9yvcTUUxSvfvzEq5GwBrjjHEpMlq/k5gvXdmJ1SbYxWtpNoVg== -object-hash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" - integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== - object-inspect@^1.12.3, object-inspect@^1.9.0: version "1.12.3" resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz" @@ -3897,25 +3698,6 @@ onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" -openai@~4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/openai/-/openai-4.4.0.tgz#dbaab326eb044ddec479951b245850c482678031" - integrity sha512-JN0t628Kh95T0IrXl0HdBqnlJg+4Vq0Bnh55tio+dfCnyzHvMLiWyCM9m726MAJD2YkDU4/8RQB6rNbEq9ct2w== - dependencies: - "@types/node" "^18.11.18" - "@types/node-fetch" "^2.6.4" - abort-controller "^3.0.0" - agentkeepalive "^4.2.1" - digest-fetch "^1.3.0" - form-data-encoder "1.7.2" - formdata-node "^4.3.2" - node-fetch "^2.6.7" - -openapi-types@^12.1.3: - version "12.1.3" - resolved "https://registry.yarnpkg.com/openapi-types/-/openapi-types-12.1.3.tgz#471995eb26c4b97b7bd356aacf7b91b73e777dd3" - integrity sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw== - optionator@^0.9.1: version "0.9.1" resolved "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz" @@ -4429,11 +4211,6 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== - ts-jest@^29.1.0: version "29.1.0" resolved "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.0.tgz" @@ -4599,24 +4376,6 @@ walker@^1.0.8: dependencies: makeerror "1.0.12" -web-streams-polyfill@4.0.0-beta.3: - version "4.0.0-beta.3" - resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38" - integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug== - -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity 
sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - which-boxed-primitive@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz" @@ -4689,11 +4448,6 @@ yallist@^4.0.0: resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== -yaml@^2.2.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.2.tgz#f522db4313c671a0ca963a75670f1c12ea909144" - integrity sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg== - yargs-parser@^21.0.1, yargs-parser@^21.1.1: version "21.1.1" resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz" @@ -4722,12 +4476,12 @@ yocto-queue@^0.1.0: resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== -zod-to-json-schema@^3.20.4: - version "3.21.4" - resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.21.4.tgz#de97c5b6d4a25e9d444618486cb55c0c7fb949fd" - integrity sha512-fjUZh4nQ1s6HMccgIeE0VP4QG/YRGPmyjO9sAh890aQKPEk3nqbfUXhMFaC+Dr5KvYBm8BCyvfpZf2jY9aGSsw== +zod-to-json-schema@^3.22.3: + version "3.22.4" + resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.22.4.tgz#f8cc691f6043e9084375e85fb1f76ebafe253d70" + integrity sha512-2Ed5dJ+n/O3cU383xSY28cuVi0BCQhF8nYqWU5paEpl7fVdqdAmiLdqLyfblbNdfOFwFfi/mqU4O1pwc60iBhQ== -zod@^3.21.4: +zod@^3.22.4: version "3.22.4" resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff" integrity sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg== diff --git a/python/langsmith/client.py b/python/langsmith/client.py index f77d6373b..169e995b6 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -1239,8 +1239,8 @@ def read_run( def list_runs( self, *, - project_id: Optional[ID_TYPE] = None, - project_name: Optional[str] = None, + project_id: Optional[Union[ID_TYPE, Sequence[ID_TYPE]]] = None, + project_name: Optional[Union[str, Sequence[str]]] = None, run_type: Optional[str] = None, reference_example_id: Optional[ID_TYPE] = None, query: Optional[str] = None, @@ -1257,9 +1257,9 @@ def list_runs( Parameters ---------- project_id : UUID or None, default=None - The ID of the project to filter by. + The ID(s) of the project to filter by. project_name : str or None, default=None - The name of the project to filter by. + The name(s) of the project to filter by. run_type : str or None, default=None The type of the runs to filter by. reference_example_id : UUID or None, default=None @@ -1288,12 +1288,20 @@ def list_runs( Run The runs. 
""" + project_ids = [] + if isinstance(project_id, (uuid.UUID, str)): + project_ids.append(project_id) + elif isinstance(project_id, list): + project_ids.extend(project_id) if project_name is not None: - if project_id is not None: - raise ValueError("Only one of project_id or project_name may be given") - project_id = self.read_project(project_name=project_name).id + if isinstance(project_name, str): + project_name = [project_name] + project_ids.extend( + [self.read_project(project_name=name).id for name in project_name] + ) + body_query: Dict[str, Any] = { - "session": [project_id] if project_id else None, + "session": project_ids if project_ids else None, "run_type": run_type, "reference_example": ( [reference_example_id] if reference_example_id else None diff --git a/python/tests/integration_tests/test_runs.py b/python/tests/integration_tests/test_runs.py index b23451c76..d04512271 100644 --- a/python/tests/integration_tests/test_runs.py +++ b/python/tests/integration_tests/test_runs.py @@ -99,6 +99,35 @@ def my_chain_run(text: str): pass +async def test_list_runs_multi_project(langchain_client: Client): + project_names = [ + "__My Tracer Project - test_list_runs_multi_project", + "__My Tracer Project - test_list_runs_multi_project2", + ] + try: + for project_name in project_names: + if langchain_client.has_project(project_name): + langchain_client.delete_project(project_name=project_name) + + @traceable(run_type="chain") + async def my_run(text: str): + return "Completed: " + text + + for project_name in project_names: + await my_run("foo", langsmith_extra=dict(project_name=project_name)) + poll_runs_until_count(langchain_client, project_names[0], 1) + poll_runs_until_count(langchain_client, project_names[1], 1) + runs = list(langchain_client.list_runs(project_name=project_names)) + assert len(runs) == 2 + assert all([run.outputs["output"] == "Completed: foo" for run in runs]) # type: ignore + assert runs[0].session_id != runs[1].session_id + + finally: + for project_name in project_names: + if langchain_client.has_project(project_name): + langchain_client.delete_project(project_name=project_name) + + async def test_nested_async_runs(langchain_client: Client): """Test nested runs with a mix of async and sync functions.""" project_name = "__My Tracer Project - test_nested_async_runs" From a9a12e98f20e7ac10b9526545e2a1f3a40e22b12 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Wed, 14 Feb 2024 14:27:02 -0800 Subject: [PATCH 22/25] Release w/ updates to listRuns (#440) --- .github/workflows/integration_tests.yml | 21 ++++++++++++++++ .github/workflows/release_js.yml | 10 ++++++++ js/package.json | 5 ++-- js/scripts/check-npm-version.js | 27 +++++++++++++++++++++ js/scripts/check-version.js | 1 - js/src/client.ts | 3 +++ js/src/index.ts | 2 +- js/src/tests/client.int.test.ts | 16 ++++++------ js/src/tests/run_trees.int.test.ts | 14 +++++++++++ python/langsmith/client.py | 4 +++ python/pyproject.toml | 2 +- python/tests/integration_tests/test_runs.py | 6 +++++ 12 files changed, 98 insertions(+), 13 deletions(-) create mode 100644 js/scripts/check-npm-version.js diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml index 6f0943ddd..b8ff3f553 100644 --- a/.github/workflows/integration_tests.yml +++ b/.github/workflows/integration_tests.yml @@ -4,11 +4,28 @@ on: push: branches: - main + pull_request: + branches: + - main + types: [opened, synchronize, reopened, labeled, unlabeled] workflow_dispatch: + 
inputs: + run-python-tests: + description: "Run Python integration tests" + default: "true" + required: false + run-js-tests: + description: "Run JS integration tests" + default: "true" + required: false jobs: python_integration_test: name: Python Integration Test + if: > + github.event_name == 'push' || + (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'release')) || + (github.event_name == 'workflow_dispatch' && ${{ github.event.inputs.run-python-tests == 'true' }}) runs-on: ubuntu-20.04 defaults: run: @@ -33,6 +50,10 @@ jobs: js_integration_test: name: JS Integration Test + if: > + github.event_name == 'push' || + (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'release')) || + (github.event_name == 'workflow_dispatch' && ${{ github.event.inputs.run-js-tests == 'true' }}) runs-on: ubuntu-20.04 defaults: run: diff --git a/.github/workflows/release_js.yml b/.github/workflows/release_js.yml index 7083786f1..778b5c948 100644 --- a/.github/workflows/release_js.yml +++ b/.github/workflows/release_js.yml @@ -31,7 +31,17 @@ jobs: run: cd js && yarn run build - name: Check version run: cd js && yarn run check-version + - name: Check NPM version + id: check_npm_version + run: | + cd js + if yarn run check-npm-version; then + echo "::set-output name=should_publish::true" + else + echo "::set-output name=should_publish::false" + fi - name: Publish package to NPM + if: steps.check_npm_version.outputs.should_publish == 'true' run: | cd js echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > .npmrc diff --git a/js/package.json b/js/package.json index da4695470..a2a9ce837 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "langsmith", - "version": "0.0.69", + "version": "0.0.70", "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.", "packageManager": "yarn@1.22.19", "files": [ @@ -31,6 +31,7 @@ "build": "yarn clean && yarn build:esm && yarn build:cjs && node scripts/create-entrypoints.js && node scripts/create-cli.js", "bump-version": "node scripts/bump-version.js", "check-version": "node scripts/check-version.js", + "check-npm-version": "node scripts/check-npm-version.js", "clean": "rm -rf dist/ && node scripts/create-entrypoints.js clean", "build:esm": "tsc --outDir dist/ && rm -rf dist/tests dist/**/tests", "build:cjs": "tsc --outDir dist-cjs/ -p tsconfig.cjs.json && node scripts/move-cjs-to-dist.js && rm -r dist-cjs", @@ -122,4 +123,4 @@ }, "./package.json": "./package.json" } -} +} \ No newline at end of file diff --git a/js/scripts/check-npm-version.js b/js/scripts/check-npm-version.js new file mode 100644 index 000000000..14c4c1f99 --- /dev/null +++ b/js/scripts/check-npm-version.js @@ -0,0 +1,27 @@ +import { execSync } from 'child_process'; +import fs from 'fs'; +import { fileURLToPath } from 'url'; +import path from 'path'; + +// Convert the URL to a file path +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Adjust the path to your package.json as necessary +const packageJsonPath = path.join(__dirname, '../package.json'); +const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, { encoding: 'utf-8' })); +const { version } = packageJson; +const { name: packageName } = packageJson; + +try { + const npmVersion = execSync(`npm view ${packageName} version`, { encoding: 'utf-8' }).trim(); + if (npmVersion && version <= npmVersion) { + console.error(`Current version 
${version} is not greater than npm version ${npmVersion}.`); + process.exit(1); // Exit with error + } else { + console.log(`Current version ${version} is greater than npm version ${npmVersion}. Proceeding with publish.`); + } +} catch (error) { + console.error('Error checking version:', error); + process.exit(1); // Exit with error if the check fails +} diff --git a/js/scripts/check-version.js b/js/scripts/check-version.js index 70642b576..7b81c7f0e 100644 --- a/js/scripts/check-version.js +++ b/js/scripts/check-version.js @@ -1,5 +1,4 @@ import { readFileSync } from "fs"; -import { join } from "path"; const indexFilePath = "src/index.ts"; const packageJson = JSON.parse(readFileSync("package.json")); let indexFileContent = readFileSync(indexFilePath, "utf-8"); diff --git a/js/src/client.ts b/js/src/client.ts index dcf0fbb7f..743899c0d 100644 --- a/js/src/client.ts +++ b/js/src/client.ts @@ -47,6 +47,7 @@ interface ClientConfig { interface ListRunsParams { projectId?: string | string[]; projectName?: string | string[]; + traceId?: string; executionOrder?: number; parentRunId?: string; referenceExampleId?: string; @@ -808,6 +809,7 @@ export class Client { projectId, projectName, parentRunId, + traceId, referenceExampleId, startTime, executionOrder, @@ -845,6 +847,7 @@ export class Client { error, id, limit, + trace: traceId, }; for await (const runs of this._getCursorPaginatedList( diff --git a/js/src/index.ts b/js/src/index.ts index 0a9df5dae..013cbac9c 100644 --- a/js/src/index.ts +++ b/js/src/index.ts @@ -5,4 +5,4 @@ export { Dataset, Example, TracerSession, Run, Feedback } from "./schemas.js"; export { RunTree, RunTreeConfig } from "./run_trees.js"; // Update using yarn bump-version -export const __version__ = "0.0.69"; +export const __version__ = "0.0.70"; diff --git a/js/src/tests/client.int.test.ts b/js/src/tests/client.int.test.ts index c52b54d54..bc5a97154 100644 --- a/js/src/tests/client.int.test.ts +++ b/js/src/tests/client.int.test.ts @@ -145,7 +145,7 @@ test.concurrent( "test create dataset", async () => { const langchainClient = new Client({ autoBatchTracing: false }); - const datasetName = "__test_create_dataset"; + const datasetName = "__test_create_dataset JS"; const datasets = await toArray( langchainClient.listDatasets({ datasetName }) ); @@ -202,8 +202,8 @@ test.concurrent( "Test list datasets", async () => { const langchainClient = new Client({ autoBatchTracing: false }); - const datasetName1 = "___TEST dataset1"; - const datasetName2 = "___TEST dataset2"; + const datasetName1 = "___TEST dataset1 JS"; + const datasetName2 = "___TEST dataset2 JS"; await deleteDataset(langchainClient, datasetName1); await deleteDataset(langchainClient, datasetName2); // Create two new datasets @@ -247,7 +247,7 @@ test.concurrent( "Test create feedback with source run", async () => { const langchainClient = new Client({ autoBatchTracing: false }); - const projectName = "__test_create_feedback_with_source_run"; + const projectName = "__test_create_feedback_with_source_run JS"; await deleteProject(langchainClient, projectName); const runId = uuidv4(); await langchainClient.createRun({ @@ -290,7 +290,7 @@ test.concurrent( hideOutputs: true, autoBatchTracing: false, }); - const projectName = "__test_create_run_with_masked_inputs_outputs"; + const projectName = "__test_create_run_with_masked_inputs_outputs JS"; await deleteProject(langchainClient, projectName); const runId = uuidv4(); await langchainClient.createRun({ @@ -344,7 +344,7 @@ test.concurrent( process.env.LANGCHAIN_OTHER_FIELD = 
"test_other_field"; // eslint-disable-next-line no-process-env process.env.LANGCHAIN_OTHER_KEY = "test_other_key"; - const projectName = "__test_create_run_with_revision_id"; + const projectName = "__test_create_run_with_revision_id JS"; await deleteProject(langchainClient, projectName); const runId = uuidv4(); await langchainClient.createRun({ @@ -396,7 +396,7 @@ describe("createChatExample", () => { it("should convert LangChainBaseMessage objects to examples", async () => { const langchainClient = new Client({ autoBatchTracing: false }); - const datasetName = "__createChatExample-test-dataset"; + const datasetName = "__createChatExample-test-dataset JS"; await deleteDataset(langchainClient, datasetName); const dataset = await langchainClient.createDataset(datasetName); @@ -477,7 +477,7 @@ test.concurrent( "Examples CRUD", async () => { const client = new Client({ autoBatchTracing: false }); - const datasetName = "__test_examples_crud"; + const datasetName = "__test_examples_crud JS"; await deleteDataset(client, datasetName); const dataset = await client.createDataset(datasetName); const example = await client.createExample( diff --git a/js/src/tests/run_trees.int.test.ts b/js/src/tests/run_trees.int.test.ts index af9572802..749ebe2d7 100644 --- a/js/src/tests/run_trees.int.test.ts +++ b/js/src/tests/run_trees.int.test.ts @@ -145,6 +145,20 @@ test.concurrent( runMap.get("parent_run")?.id ); expect(runMap.get("parent_run")?.parent_run_id).toBeNull(); + + const traceRunsIter = langchainClient.listRuns({ + traceId: runs[0].trace_id, + }); + const traceRuns = await toArray(traceRunsIter); + expect(traceRuns.length).toEqual(5); + // Sort by dotted order and assert runs lists are equal + const sortedRuns = runs.sort((a, b) => + (a?.dotted_order ?? "").localeCompare(b?.dotted_order ?? "") + ); + const sortedTraceRuns = traceRuns.sort((a, b) => + (a?.dotted_order ?? "").localeCompare(b?.dotted_order ?? "") + ); + expect(sortedRuns).toEqual(sortedTraceRuns); await langchainClient.deleteProject({ projectName }); }, 120_000 diff --git a/python/langsmith/client.py b/python/langsmith/client.py index 169e995b6..62ee0f31c 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -1242,6 +1242,7 @@ def list_runs( project_id: Optional[Union[ID_TYPE, Sequence[ID_TYPE]]] = None, project_name: Optional[Union[str, Sequence[str]]] = None, run_type: Optional[str] = None, + trace_id: Optional[ID_TYPE] = None, reference_example_id: Optional[ID_TYPE] = None, query: Optional[str] = None, filter: Optional[str] = None, @@ -1262,6 +1263,8 @@ def list_runs( The name(s) of the project to filter by. run_type : str or None, default=None The type of the runs to filter by. + trace_id : UUID or None, default=None + The ID of the trace to filter by. reference_example_id : UUID or None, default=None The ID of the reference example to filter by. query : str or None, default=None @@ -1313,6 +1316,7 @@ def list_runs( "start_time": start_time.isoformat() if start_time else None, "error": error, "id": run_ids, + "trace": trace_id, **kwargs, } body_query = {k: v for k, v in body_query.items() if v is not None} diff --git a/python/pyproject.toml b/python/pyproject.toml index 9a9c77260..1bd0cf6cf 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langsmith" -version = "0.0.91" +version = "0.0.92" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
authors = ["LangChain "] license = "MIT" diff --git a/python/tests/integration_tests/test_runs.py b/python/tests/integration_tests/test_runs.py index d04512271..e63b83401 100644 --- a/python/tests/integration_tests/test_runs.py +++ b/python/tests/integration_tests/test_runs.py @@ -224,12 +224,18 @@ async def my_chain_run(text: str, run_tree: RunTree): executor.shutdown(wait=True) poll_runs_until_count(langchain_client, project_name, 17) runs = list(langchain_client.list_runs(project_name=project_name)) + trace_runs = list(langchain_client.list_runs(trace_id=runs[0].trace_id)) + assert len(trace_runs) == 17 assert len(runs) == 17 assert sum([run.run_type == "llm" for run in runs]) == 8 assert sum([run.name == "async_llm" for run in runs]) == 6 assert sum([run.name == "my_llm_run" for run in runs]) == 2 assert sum([run.run_type == "tool" for run in runs]) == 6 assert sum([run.run_type == "chain" for run in runs]) == 3 + # sort by dotted_order + runs = sorted(runs, key=lambda run: run.dotted_order) + trace_runs = sorted(trace_runs, key=lambda run: run.dotted_order) + assert runs == trace_runs # Check that all instances of async_llm have a parent with # the same name (my_tool_run) name_to_ids_map = defaultdict(list) From ebb61b6a0de037083e47e741bacc89487de13849 Mon Sep 17 00:00:00 2001 From: Jacob Lee Date: Wed, 14 Feb 2024 23:11:23 -0800 Subject: [PATCH 23/25] JS AsyncLocalStorage tracer (#442) @dqbd @nfcampos @hinthornw --------- Co-authored-by: Tat Dat Duong --- js/.gitignore | 3 + js/package.json | 10 +- js/scripts/create-entrypoints.js | 1 + js/src/index.ts | 10 +- js/src/run_trees.ts | 8 ++ js/src/tests/traceable.int.test.ts | 168 ++++++++++++++++++++++++ js/src/traceable.ts | 204 +++++++++++++++++++++++++++++ js/tsconfig.json | 1 + js/yarn.lock | 6 +- 9 files changed, 405 insertions(+), 6 deletions(-) create mode 100644 js/src/tests/traceable.int.test.ts create mode 100644 js/src/traceable.ts diff --git a/js/.gitignore b/js/.gitignore index 9bdc273aa..a87badf7c 100644 --- a/js/.gitignore +++ b/js/.gitignore @@ -37,6 +37,9 @@ Chinook_Sqlite.sql /run_trees.cjs /run_trees.js /run_trees.d.ts +/traceable.cjs +/traceable.js +/traceable.d.ts /evaluation.cjs /evaluation.js /evaluation.d.ts diff --git a/js/package.json b/js/package.json index a2a9ce837..4eaeea10f 100644 --- a/js/package.json +++ b/js/package.json @@ -11,6 +11,9 @@ "run_trees.cjs", "run_trees.js", "run_trees.d.ts", + "traceable.cjs", + "traceable.js", + "traceable.d.ts", "evaluation.cjs", "evaluation.js", "evaluation.d.ts", @@ -111,6 +114,11 @@ "import": "./run_trees.js", "require": "./run_trees.cjs" }, + "./traceable": { + "types": "./traceable.d.ts", + "import": "./traceable.js", + "require": "./traceable.cjs" + }, "./evaluation": { "types": "./evaluation.d.ts", "import": "./evaluation.js", @@ -123,4 +131,4 @@ }, "./package.json": "./package.json" } -} \ No newline at end of file +} diff --git a/js/scripts/create-entrypoints.js b/js/scripts/create-entrypoints.js index 21290f5cd..ef8ade94f 100644 --- a/js/scripts/create-entrypoints.js +++ b/js/scripts/create-entrypoints.js @@ -9,6 +9,7 @@ import * as path from "path"; const entrypoints = { client: "client", run_trees: "run_trees", + traceable: "traceable", evaluation: "evaluation/index", schemas: "schemas", }; diff --git a/js/src/index.ts b/js/src/index.ts index 013cbac9c..a34d26520 100644 --- a/js/src/index.ts +++ b/js/src/index.ts @@ -1,8 +1,14 @@ export { Client } from "./client.js"; -export { Dataset, Example, TracerSession, Run, Feedback } from "./schemas.js"; +export type 
{ + Dataset, + Example, + TracerSession, + Run, + Feedback, +} from "./schemas.js"; -export { RunTree, RunTreeConfig } from "./run_trees.js"; +export { RunTree, type RunTreeConfig } from "./run_trees.js"; // Update using yarn bump-version export const __version__ = "0.0.70"; diff --git a/js/src/run_trees.ts b/js/src/run_trees.ts index 6b98ae661..f081593f1 100644 --- a/js/src/run_trees.ts +++ b/js/src/run_trees.ts @@ -205,3 +205,11 @@ export class RunTree implements BaseRun { await this.client.updateRun(this.id, runUpdate); } } + +export function isRunTree(x?: unknown): x is RunTree { + return ( + x !== undefined && + typeof (x as RunTree).createChild === "function" && + typeof (x as RunTree).postRun === "function" + ); +} diff --git a/js/src/tests/traceable.int.test.ts b/js/src/tests/traceable.int.test.ts new file mode 100644 index 000000000..eaf3c05b8 --- /dev/null +++ b/js/src/tests/traceable.int.test.ts @@ -0,0 +1,168 @@ +import { v4 as uuidv4 } from "uuid"; +// eslint-disable-next-line import/no-extraneous-dependencies +import { FakeStreamingLLM } from "@langchain/core/utils/testing"; +import { Client } from "../client.js"; +import { traceable } from "../traceable.js"; +import { RunTree } from "../run_trees.js"; + +async function deleteProject(langchainClient: Client, projectName: string) { + try { + await langchainClient.readProject({ projectName }); + await langchainClient.deleteProject({ projectName }); + } catch (e) { + // Pass + } +} + +async function waitUntil( + condition: () => Promise, + timeout: number, + interval: number +): Promise { + const start = Date.now(); + while (Date.now() - start < timeout) { + if (await condition()) { + return; + } + await new Promise((resolve) => setTimeout(resolve, interval)); + } + throw new Error("Timeout"); +} + +async function waitUntilRunFound( + client: Client, + runId: string, + checkOutputs = false +) { + return waitUntil( + async () => { + try { + const run = await client.readRun(runId); + if (checkOutputs) { + return ( + run.outputs !== null && + run.outputs !== undefined && + Object.keys(run.outputs).length !== 0 + ); + } + return true; + } catch (e) { + return false; + } + }, + 30_000, + 5_000 + ); +} + +test.concurrent( + "Test traceable wrapper", + async () => { + const langchainClient = new Client({ + callerOptions: { maxRetries: 0 }, + }); + const runId = uuidv4(); + const projectName = "__test_traceable_wrapper"; + const addValueTraceable = traceable( + (a: string, b: number) => { + return a + b; + }, + { + name: "add_value", + project_name: projectName, + client: langchainClient, + id: runId, + } + ); + + expect(await addValueTraceable("testing", 9)).toBe("testing9"); + + await waitUntilRunFound(langchainClient, runId, true); + const storedRun = await langchainClient.readRun(runId); + expect(storedRun.id).toEqual(runId); + + const runId2 = uuidv4(); + const nestedAddValueTraceable = traceable( + (a: string, b: number) => { + return a + b; + }, + { + name: "nested_add_value", + project_name: projectName, + client: langchainClient, + } + ); + const entryTraceable = traceable( + async (complex: { value: string }) => { + const result = await nestedAddValueTraceable(complex.value, 1); + const result2 = await nestedAddValueTraceable(result, 2); + await nestedAddValueTraceable( + new RunTree({ + name: "root_nested_add_value", + project_name: projectName, + client: langchainClient, + }), + result, + 2 + ); + return nestedAddValueTraceable(result2, 3); + }, + { + name: "run_with_nesting", + project_name: projectName, + client: 
langchainClient, + id: runId2, + } + ); + + expect(await entryTraceable({ value: "testing" })).toBe("testing123"); + + await waitUntilRunFound(langchainClient, runId2, true); + const storedRun2 = await langchainClient.readRun(runId2); + expect(storedRun2.id).toEqual(runId2); + + const runId3 = uuidv4(); + + const llm = new FakeStreamingLLM({ sleep: 0 }); + + const iterableTraceable = traceable(llm.stream.bind(llm), { + name: "iterable_traceable", + project_name: projectName, + client: langchainClient, + id: runId3, + }); + + const chunks = []; + + for await (const chunk of await iterableTraceable("Hello there")) { + chunks.push(chunk); + } + expect(chunks.join("")).toBe("Hello there"); + await waitUntilRunFound(langchainClient, runId3, true); + const storedRun3 = await langchainClient.readRun(runId3); + expect(storedRun3.id).toEqual(runId3); + + await deleteProject(langchainClient, projectName); + + async function overload(a: string, b: number): Promise; + async function overload(config: { a: string; b: number }): Promise; + async function overload( + ...args: [a: string, b: number] | [config: { a: string; b: number }] + ): Promise { + if (args.length === 1) { + return args[0].a + args[0].b; + } + return args[0] + args[1]; + } + + const wrappedOverload = traceable(overload, { + name: "wrapped_overload", + project_name: projectName, + client: langchainClient, + }); + + expect(await wrappedOverload("testing", 123)).toBe("testing123"); + expect(await wrappedOverload({ a: "testing", b: 456 })).toBe("testing456"); + }, + 180_000 +); diff --git a/js/src/traceable.ts b/js/src/traceable.ts new file mode 100644 index 000000000..e31fded3a --- /dev/null +++ b/js/src/traceable.ts @@ -0,0 +1,204 @@ +import { AsyncLocalStorage } from "async_hooks"; + +import { RunTree, RunTreeConfig, isRunTree } from "./run_trees.js"; +import { KVMap } from "./schemas.js"; + +const asyncLocalStorage = new AsyncLocalStorage(); + +export type RunTreeLike = RunTree; + +type WrapArgReturnPair = Pair extends [ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + infer Args extends any[], + infer Return +] + ? { + (...args: Args): Promise; + (...args: [runTree: RunTreeLike, ...rest: Args]): Promise; + } + : never; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +type UnionToIntersection = (U extends any ? (x: U) => void : never) extends ( + x: infer I +) => void + ? I + : never; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type TraceableFunction any> = + // function overloads are represented as intersections rather than unions + // matches the behavior introduced in https://github.com/microsoft/TypeScript/pull/54448 + Func extends { + (...args: infer A1): infer R1; + (...args: infer A2): infer R2; + (...args: infer A3): infer R3; + (...args: infer A4): infer R4; + (...args: infer A5): infer R5; + } + ? UnionToIntersection< + WrapArgReturnPair<[A1, R1] | [A2, R2] | [A3, R3] | [A4, R4] | [A5, R5]> + > + : Func extends { + (...args: infer A1): infer R1; + (...args: infer A2): infer R2; + (...args: infer A3): infer R3; + (...args: infer A4): infer R4; + } + ? UnionToIntersection< + WrapArgReturnPair<[A1, R1] | [A2, R2] | [A3, R3] | [A4, R4]> + > + : Func extends { + (...args: infer A1): infer R1; + (...args: infer A2): infer R2; + (...args: infer A3): infer R3; + } + ? UnionToIntersection> + : Func extends { + (...args: infer A1): infer R1; + (...args: infer A2): infer R2; + } + ? UnionToIntersection> + : Func extends { + (...args: infer A1): infer R1; + } + ? 
UnionToIntersection> + : never; + +const isAsyncIterable = (x: unknown): x is AsyncIterable => + x != null && + typeof x === "object" && + // eslint-disable-next-line @typescript-eslint/no-explicit-any + typeof (x as any)[Symbol.asyncIterator] === "function"; + +/** + * Higher-order function that takes function as input and returns a + * "TraceableFunction" - a wrapped version of the input that + * automatically handles tracing. If the returned traceable function calls any + * traceable functions, those are automatically traced as well. + * + * The returned TraceableFunction can accept a run tree or run tree config as + * its first argument. If omitted, it will default to the caller's run tree, + * or will be treated as a root run. + * + * @param wrappedFunc Targeted function to be traced + * @param config Additional metadata such as name, tags or providing + * a custom LangSmith client instance + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function traceable any>( + wrappedFunc: Func, + config?: Partial +) { + type Inputs = Parameters; + type Output = ReturnType; + + const traceableFunc = async ( + ...args: Inputs | [RunTreeLike, ...Inputs] + ): Promise => { + let currentRunTree: RunTree; + let rawInputs: Inputs; + + const ensuredConfig: RunTreeConfig = { + name: wrappedFunc.name || "", + ...config, + }; + + const previousRunTree = asyncLocalStorage.getStore(); + if (isRunTree(args[0])) { + currentRunTree = args[0]; + rawInputs = args.slice(1) as Inputs; + } else if (previousRunTree !== undefined) { + currentRunTree = await previousRunTree.createChild(ensuredConfig); + rawInputs = args as Inputs; + } else { + currentRunTree = new RunTree(ensuredConfig); + rawInputs = args as Inputs; + } + + let inputs: KVMap; + const firstInput = rawInputs[0]; + if (firstInput == null) { + inputs = {}; + } else if (rawInputs.length > 1) { + inputs = { args: rawInputs }; + } else if (isKVMap(firstInput)) { + inputs = firstInput; + } else { + inputs = { input: firstInput }; + } + + currentRunTree.inputs = inputs; + + const initialOutputs = currentRunTree.outputs; + const initialError = currentRunTree.error; + await currentRunTree.postRun(); + + return new Promise((resolve, reject) => { + void asyncLocalStorage.run(currentRunTree, async () => { + try { + const rawOutput = await wrappedFunc(...rawInputs); + if (isAsyncIterable(rawOutput)) { + // eslint-disable-next-line no-inner-declarations + async function* wrapOutputForTracing() { + const chunks: unknown[] = []; + // TypeScript thinks this is unsafe + for await (const chunk of rawOutput as AsyncIterable) { + chunks.push(chunk); + yield chunk; + } + await currentRunTree.end({ outputs: chunks }); + await currentRunTree.patchRun(); + } + return resolve(wrapOutputForTracing() as Output); + } else { + const outputs: KVMap = isKVMap(rawOutput) + ? 
rawOutput + : { outputs: rawOutput }; + + if (initialOutputs === currentRunTree.outputs) { + await currentRunTree.end(outputs); + } else { + currentRunTree.end_time = Date.now(); + } + + await currentRunTree.patchRun(); + return resolve(rawOutput); + } + } catch (error) { + if (initialError === currentRunTree.error) { + await currentRunTree.end(initialOutputs, String(error)); + } else { + currentRunTree.end_time = Date.now(); + } + + await currentRunTree.patchRun(); + reject(error); + } + }); + }); + }; + + Object.defineProperty(wrappedFunc, "langsmith:traceable", { + value: config, + }); + + return traceableFunc as TraceableFunction; +} + +export function isTraceableFunction( + x: unknown + // eslint-disable-next-line @typescript-eslint/no-explicit-any +): x is TraceableFunction { + return typeof x === "function" && "langsmith:traceable" in x; +} + +function isKVMap(x: unknown): x is Record { + return ( + typeof x === "object" && + x != null && + !Array.isArray(x) && + // eslint-disable-next-line no-instanceof/no-instanceof + !(x instanceof Date) + ); +} diff --git a/js/tsconfig.json b/js/tsconfig.json index 5a466ec5a..5edd93e56 100644 --- a/js/tsconfig.json +++ b/js/tsconfig.json @@ -34,6 +34,7 @@ "entryPoints": [ "src/client.ts", "src/run_trees.ts", + "src/traceable.ts", "src/evaluation/index.ts", "src/schemas.ts" ] diff --git a/js/yarn.lock b/js/yarn.lock index 4456625d0..a153e3709 100644 --- a/js/yarn.lock +++ b/js/yarn.lock @@ -3452,9 +3452,9 @@ kleur@^3.0.3: integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== langsmith@~0.0.48: - version "0.0.68" - resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.0.68.tgz#8748d3203d348cc19e5ee4ddeef908964a62e21a" - integrity sha512-bxaJndEhUFDfv5soWKxONrLMZaVZfS+G4smJl3WYQlsEph8ierG3QbJfx1PEwl40TD0aFBjzq62usUX1UOCjuA== + version "0.0.70" + resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.0.70.tgz#797be2b26da18843a94a802b6a73c91b72e8042b" + integrity sha512-QFHrzo/efBowGPCxtObv7G40/OdwqQfGshavMbSJtHBgX+OMqnn4lCMqVeEwTdyue4lEcpwAsGNg5Vty91YIyw== dependencies: "@types/uuid" "^9.0.1" commander "^10.0.1" From fbdcbd9fe826a01808479a13c85e27ab95e02974 Mon Sep 17 00:00:00 2001 From: Jacob Lee Date: Wed, 14 Feb 2024 23:31:38 -0800 Subject: [PATCH 24/25] Immediately trigger a batch send on root run end in JS (#441) @nfcampos @hinthornw --------- Co-authored-by: William FH <13333726+hinthornw@users.noreply.github.com> --- js/src/client.ts | 124 +++++++++++++++++--------- js/src/tests/batch_client.int.test.ts | 2 +- js/src/tests/batch_client.test.ts | 44 ++++----- js/src/tests/client.int.test.ts | 2 +- js/src/tests/run_trees.int.test.ts | 2 +- 5 files changed, 108 insertions(+), 66 deletions(-) diff --git a/js/src/client.ts b/js/src/client.ts index 743899c0d..2d4e98717 100644 --- a/js/src/client.ts +++ b/js/src/client.ts @@ -220,6 +220,38 @@ function assertUuid(str: string): void { } } +export class Queue { + items: [T, () => void][] = []; + + get size() { + return this.items.length; + } + + push(item: T): Promise { + // this.items.push is synchronous with promise creation: + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/Promise + return new Promise((resolve) => { + this.items.push([item, resolve]); + }); + } + + pop(upToN: number): [T[], () => void] { + if (upToN < 1) { + throw new Error("Number of items to pop off may not be less than 1."); + } + const popped: typeof this.items = []; + while (popped.length < upToN && 
this.items.length) { + const item = this.items.shift(); + if (item) { + popped.push(item); + } else { + break; + } + } + return [popped.map((it) => it[0]), () => popped.forEach((it) => it[1]())]; + } +} + export class Client { private apiKey?: string; @@ -241,11 +273,11 @@ export class Client { private sampledPostUuids = new Set(); - private autoBatchTracing = false; + private autoBatchTracing = true; private batchEndpointSupported?: boolean; - private pendingAutoBatchedRuns: AutoBatchQueueItem[] = []; + private autoBatchQueue = new Queue(); private pendingAutoBatchedRunLimit = 100; @@ -487,57 +519,56 @@ export class Client { } } - private async triggerAutoBatchSend(runs?: AutoBatchQueueItem[]) { - let batch = runs; - if (batch === undefined) { - batch = this.pendingAutoBatchedRuns.slice( - 0, - this.pendingAutoBatchedRunLimit - ); - this.pendingAutoBatchedRuns = this.pendingAutoBatchedRuns.slice( + private async drainAutoBatchQueue() { + while (this.autoBatchQueue.size >= 0) { + const [batch, done] = this.autoBatchQueue.pop( this.pendingAutoBatchedRunLimit ); + if (!batch.length) { + done(); + return; + } + try { + await this.batchIngestRuns({ + runCreates: batch + .filter((item) => item.action === "create") + .map((item) => item.item) as RunCreate[], + runUpdates: batch + .filter((item) => item.action === "update") + .map((item) => item.item) as RunUpdate[], + }); + } finally { + done(); + } } - await this.batchIngestRuns({ - runCreates: batch - .filter((item) => item.action === "create") - .map((item) => item.item) as RunCreate[], - runUpdates: batch - .filter((item) => item.action === "update") - .map((item) => item.item) as RunUpdate[], - }); } - private appendRunCreateToAutoBatchQueue(item: AutoBatchQueueItem) { + private async processRunOperation( + item: AutoBatchQueueItem, + immediatelyTriggerBatch?: boolean + ) { const oldTimeout = this.autoBatchTimeout; clearTimeout(this.autoBatchTimeout); this.autoBatchTimeout = undefined; - this.pendingAutoBatchedRuns.push(item); - while ( - this.pendingAutoBatchedRuns.length >= this.pendingAutoBatchedRunLimit + const itemPromise = this.autoBatchQueue.push(item); + if ( + immediatelyTriggerBatch || + this.autoBatchQueue.size > this.pendingAutoBatchedRunLimit ) { - const batch = this.pendingAutoBatchedRuns.slice( - 0, - this.pendingAutoBatchedRunLimit - ); - this.pendingAutoBatchedRuns = this.pendingAutoBatchedRuns.slice( - this.pendingAutoBatchedRunLimit - ); - void this.triggerAutoBatchSend(batch); + await this.drainAutoBatchQueue(); } - if (this.pendingAutoBatchedRuns.length > 0) { - if (!oldTimeout) { - this.autoBatchTimeout = setTimeout(() => { - this.autoBatchTimeout = undefined; - void this.triggerAutoBatchSend(); - }, this.autoBatchInitialDelayMs); - } else { - this.autoBatchTimeout = setTimeout(() => { + if (this.autoBatchQueue.size > 0) { + this.autoBatchTimeout = setTimeout( + () => { this.autoBatchTimeout = undefined; - void this.triggerAutoBatchSend(); - }, this.autoBatchAggregationDelayMs); - } + void this.drainAutoBatchQueue(); + }, + oldTimeout + ? 
this.autoBatchAggregationDelayMs + : this.autoBatchInitialDelayMs + ); } + return itemPromise; } protected async batchEndpointIsSupported() { @@ -573,7 +604,7 @@ export class Client { runCreate.trace_id !== undefined && runCreate.dotted_order !== undefined ) { - this.appendRunCreateToAutoBatchQueue({ + void this.processRunOperation({ action: "create", item: runCreate, }); @@ -705,7 +736,14 @@ export class Client { data.trace_id !== undefined && data.dotted_order !== undefined ) { - this.appendRunCreateToAutoBatchQueue({ action: "update", item: data }); + if (run.end_time !== undefined && data.parent_run_id === undefined) { + // Trigger a batch as soon as a root trace ends and block to ensure trace finishes + // in serverless environments. + await this.processRunOperation({ action: "update", item: data }, true); + return; + } else { + void this.processRunOperation({ action: "update", item: data }); + } return; } const headers = { ...this.headers, "Content-Type": "application/json" }; diff --git a/js/src/tests/batch_client.int.test.ts b/js/src/tests/batch_client.int.test.ts index 51a86a6e9..095ff0d05 100644 --- a/js/src/tests/batch_client.int.test.ts +++ b/js/src/tests/batch_client.int.test.ts @@ -48,7 +48,7 @@ async function waitUntilRunFound( } }, 30_000, - 1_000 + 5_000 ); } diff --git a/js/src/tests/batch_client.test.ts b/js/src/tests/batch_client.test.ts index 9b7a10568..7f69b8e49 100644 --- a/js/src/tests/batch_client.test.ts +++ b/js/src/tests/batch_client.test.ts @@ -129,7 +129,7 @@ describe("Batch client tracing", () => { ); }); - it("should create an example with the given input and generation", async () => { + it("should immediately trigger a batch on root run end", async () => { const client = new Client({ apiKey: "test-api-key", autoBatchTracing: true, @@ -160,10 +160,12 @@ describe("Batch client tracing", () => { dotted_order: dottedOrder, }); + // Wait for first batch to send await new Promise((resolve) => setTimeout(resolve, 300)); const endTime = Math.floor(new Date().getTime() / 1000); + // A root run finishing triggers the second batch await client.updateRun(runId, { outputs: { output: ["Hi"] }, dotted_order: dottedOrder, @@ -177,6 +179,7 @@ describe("Batch client tracing", () => { runId2 ); + // Will send in a third batch, even though it's triggered around the same time as the update await client.createRun({ id: runId2, project_name: projectName, @@ -191,6 +194,7 @@ describe("Batch client tracing", () => { const calledRequestParam: any = callSpy.mock.calls[0][2]; const calledRequestParam2: any = callSpy.mock.calls[1][2]; + const calledRequestParam3: any = callSpy.mock.calls[2][2]; expect(JSON.parse(calledRequestParam?.body)).toEqual({ post: [ expect.objectContaining({ @@ -207,17 +211,7 @@ describe("Batch client tracing", () => { }); expect(JSON.parse(calledRequestParam2?.body)).toEqual({ - post: [ - expect.objectContaining({ - id: runId2, - run_type: "llm", - inputs: { - text: "hello world 2", - }, - trace_id: runId2, - dotted_order: dottedOrder2, - }), - ], + post: [], patch: [ expect.objectContaining({ id: runId, @@ -230,6 +224,20 @@ describe("Batch client tracing", () => { }), ], }); + expect(JSON.parse(calledRequestParam3?.body)).toEqual({ + post: [ + expect.objectContaining({ + id: runId2, + run_type: "llm", + inputs: { + text: "hello world 2", + }, + trace_id: runId2, + dotted_order: dottedOrder2, + }), + ], + patch: [], + }); }); it("should send traces above the batch size and see even batches", async () => { @@ -272,9 +280,10 @@ describe("Batch client tracing", () 
=> { await new Promise((resolve) => setTimeout(resolve, 10)); const calledRequestParam: any = callSpy.mock.calls[0][2]; - // Second batch should still be pending - expect(callSpy.mock.calls[1]).toBeUndefined(); - // First batch should fire as soon as it hits 10 + const calledRequestParam2: any = callSpy.mock.calls[1][2]; + + // Queue should drain as soon as size limit is reached, + // sending both batches expect(JSON.parse(calledRequestParam?.body)).toEqual({ post: runIds.slice(0, 10).map((runId, i) => expect.objectContaining({ @@ -289,11 +298,6 @@ describe("Batch client tracing", () => { patch: [], }); - // Wait for the aggregation delay - await new Promise((resolve) => setTimeout(resolve, 100)); - - const calledRequestParam2: any = callSpy.mock.calls[1][2]; - expect(JSON.parse(calledRequestParam2?.body)).toEqual({ post: runIds.slice(10).map((runId, i) => expect.objectContaining({ diff --git a/js/src/tests/client.int.test.ts b/js/src/tests/client.int.test.ts index bc5a97154..9509416b8 100644 --- a/js/src/tests/client.int.test.ts +++ b/js/src/tests/client.int.test.ts @@ -68,7 +68,7 @@ async function waitUntilRunFound( } }, 180_000, - 1_000 + 5_000 ); } diff --git a/js/src/tests/run_trees.int.test.ts b/js/src/tests/run_trees.int.test.ts index 749ebe2d7..2a0184d3b 100644 --- a/js/src/tests/run_trees.int.test.ts +++ b/js/src/tests/run_trees.int.test.ts @@ -43,7 +43,7 @@ async function pollRunsUntilCount( } }, 120_000, // Wait up to 120 seconds - 3000 // every 3 second + 5000 // every 5 second ); } From 4ef2c932ab3c1f3de6e46375db76bf2cc7864028 Mon Sep 17 00:00:00 2001 From: William FH <13333726+hinthornw@users.noreply.github.com> Date: Thu, 15 Feb 2024 00:12:49 -0800 Subject: [PATCH 25/25] 0.1.0 (#443) --- .../python-integration-tests/action.yml | 4 +- .github/workflows/integration_tests.yml | 4 +- .github/workflows/link-check.yml | 21 + .github/workflows/python_test.yml | 10 +- README.md | 40 +- js/README.md | 258 ++++++- js/package.json | 4 +- js/src/index.ts | 2 +- js/src/tests/run_trees.int.test.ts | 2 +- python/README.md | 130 ++++ python/poetry.lock | 665 +----------------- python/pyproject.toml | 4 +- 12 files changed, 468 insertions(+), 676 deletions(-) create mode 100644 .github/workflows/link-check.yml diff --git a/.github/actions/python-integration-tests/action.yml b/.github/actions/python-integration-tests/action.yml index 4fb91cc06..d8a06a5b4 100644 --- a/.github/actions/python-integration-tests/action.yml +++ b/.github/actions/python-integration-tests/action.yml @@ -28,7 +28,9 @@ runs: working-directory: python - name: Install dependencies - run: poetry install --with dev + run: | + poetry install --with dev + poetry run pip install -U langchain shell: bash working-directory: python diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml index b8ff3f553..23de6b5ea 100644 --- a/.github/workflows/integration_tests.yml +++ b/.github/workflows/integration_tests.yml @@ -40,7 +40,9 @@ jobs: - name: Install poetry run: pipx install poetry==1.4.2 - name: Install dependencies - run: poetry install --with dev + run: | + poetry install --with dev + poetry run pip install -U langchain - name: Run Python integration tests uses: ./.github/actions/python-integration-tests with: diff --git a/.github/workflows/link-check.yml b/.github/workflows/link-check.yml new file mode 100644 index 000000000..d4dbe89db --- /dev/null +++ b/.github/workflows/link-check.yml @@ -0,0 +1,21 @@ +name: Check Links + +on: + pull_request: + branches: + - main + push: + branches: + 
- main
+  schedule:
+    - cron: '0 5 * * *'
+  workflow_dispatch:
+
+jobs:
+  markdown-link-check:
+    runs-on: ubuntu-latest
+    steps:
+    - name: Checkout code
+      uses: actions/checkout@v2
+    - name: Check links in Markdown files
+      uses: gaurav-nelson/github-action-markdown-link-check@v1
\ No newline at end of file
diff --git a/.github/workflows/python_test.yml b/.github/workflows/python_test.yml
index c01eb0cd7..dc9c7baa1 100644
--- a/.github/workflows/python_test.yml
+++ b/.github/workflows/python_test.yml
@@ -52,7 +52,9 @@ jobs:
       - name: Install poetry
         run: pipx install poetry==$POETRY_VERSION
       - name: Install dependencies
-        run: poetry install --with dev,lint
+        run: |
+          poetry install --with dev,lint
+          poetry run pip install -U langchain
       - name: Analysing the code with our lint
         run: make lint
 
@@ -72,7 +74,9 @@ jobs:
       - name: Install poetry
         run: pipx install poetry==$POETRY_VERSION
       - name: Install dependencies
-        run: poetry install
+        run: |
+          poetry install --with dev
+          poetry run pip install -U langchain
       - name: Run tests
         run: make tests
-        shell: bash
\ No newline at end of file
+        shell: bash
diff --git a/README.md b/README.md
index 6e4e58ab5..2fcefafd4 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@
 This repository contains the Python and Javascript SDK's for interacting with the [LangSmith platform](https://smith.langchain.com/).
 
 LangSmith helps your team debug, evaluate, and monitor your language models and intelligent agents. It works
-with any LLM Application, including a native integration with the [LangChain Python](https://github.com/hwchase17/langchain) and [LangChain JS](https://github.com/hwchase17/langchainjs) open source libraries.
+with any LLM Application, including a native integration with the [LangChain Python](https://github.com/langchain-ai/langchain) and [LangChain JS](https://github.com/langchain-ai/langchainjs) open source libraries.
 
 LangSmith is developed and maintained by [LangChain](https://langchain.com/), the company behind the LangChain framework.
 
@@ -19,7 +19,7 @@ export LANGCHAIN_API_KEY=ls_...
 
 Then start tracing your app:
 
-```
+```python
 import openai
 from langsmith import traceable
 from langsmith.wrappers import wrap_openai
@@ -42,6 +42,42 @@ export LANGCHAIN_API_KEY=ls_...
 
 Then start tracing your app!
 
+```javascript
+import { traceable } from "langsmith/traceable";
+import { OpenAI } from "openai";
+
+const openai = new OpenAI();
+
+const createCompletion = traceable(
+  openai.chat.completions.create.bind(openai.chat.completions),
+  { name: "OpenAI Chat Completion", run_type: "llm" }
+);
+
+await createCompletion({
+  model: "gpt-3.5-turbo",
+  messages: [{ content: "Hi there!", role: "user" }],
+});
+```
+
+```
+{
+  id: 'chatcmpl-8sOWEOYVyehDlyPcBiaDtTxWvr9v6',
+  object: 'chat.completion',
+  created: 1707974654,
+  model: 'gpt-3.5-turbo-0613',
+  choices: [
+    {
+      index: 0,
+      message: { role: 'assistant', content: 'Hello! How can I help you today?' },
+      logprobs: null,
+      finish_reason: 'stop'
+    }
+  ],
+  usage: { prompt_tokens: 10, completion_tokens: 9, total_tokens: 19 },
+  system_fingerprint: null
+}
+```
+
 ## Cookbook
 
 For tutorials on how to get more value out of LangSmith, check out the [Langsmith Cookbook](https://github.com/langchain-ai/langsmith-cookbook/tree/main) repo.
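Editor's note: the README hunk above cuts the Python quickstart off right after its imports. As a purely illustrative aid (not part of the patch itself), here is a minimal sketch of how a traced call with `wrap_openai` and `@traceable()` typically continues, mirroring the python/README.md changes later in this patch series; the function name `pipeline`, the model, and the prompt are placeholders.

```python
# Minimal sketch (assumed continuation of the truncated quickstart above).
# Placeholders: pipeline(), "gpt-3.5-turbo", and the prompt string.
import openai
from langsmith import traceable
from langsmith.wrappers import wrap_openai

# Wrap the OpenAI client so its completion calls are logged to LangSmith.
client = wrap_openai(openai.OpenAI())

@traceable()  # trace this function as a parent run; wrapped client calls nest under it
def pipeline(user_input: str) -> str:
    result = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": user_input}],
    )
    return result.choices[0].message.content

pipeline("Hello, world!")
```

With this wiring, the completion call appears as a child run of `pipeline` in the configured LangSmith project.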
diff --git a/js/README.md b/js/README.md
index 5245b0d77..de971791f 100644
--- a/js/README.md
+++ b/js/README.md
@@ -71,21 +71,261 @@ console.log(response);
 
 ### Logging Traces Outside LangChain
 
-_Note: this API is experimental and may change in the future_
-
 You can still use the LangSmith development platform without depending on any
 LangChain code. You can connect either by setting the appropriate environment variables,
 or by directly specifying the connection information in the RunTree.
 
 1. **Copy the environment variables from the Settings Page and add them to your application.**
 
-```typescript
-process.env["LANGCHAIN_ENDPOINT"] = "https://api.smith.langchain.com"; // or your own server
-process.env["LANGCHAIN_API_KEY"] = "";
-// process.env["LANGCHAIN_PROJECT"] = "My Project Name"; // Optional: "default" is used if not set
+```shell
+export LANGCHAIN_API_KEY=
+# export LANGCHAIN_PROJECT="My Project Name" # Optional: "default" is used if not set
+# export LANGCHAIN_ENDPOINT=https://api.smith.langchain.com # or your own server
+```
+
+## Integrations
+
+LangSmith's `traceable` wrapper function makes it easy to trace any function or LLM call in your own favorite framework. Below are some examples.
+
+### OpenAI SDK
+
+
+The easiest way to trace calls from the [OpenAI SDK](https://platform.openai.com/docs/api-reference) with LangSmith
+is to use the `traceable` wrapper function, available in LangSmith 0.1.0 and up.
+
+To use it, you first need to set your LangSmith API key:
+
+```shell
+export LANGCHAIN_API_KEY=
+```
+
+Next, you will need to install the LangSmith SDK and the OpenAI SDK:
+
+```shell
+npm install langsmith openai
+```
+
+After that, initialize your OpenAI client:
+
+```ts
+import { OpenAI } from "openai";
+
+const openai = new OpenAI();
+```
+
+Then, you can wrap the client methods you want to use by passing them to the `traceable` function like this:
+
+```ts
+import { traceable } from "langsmith/traceable";
+
+const createCompletion = traceable(
+  openai.chat.completions.create.bind(openai.chat.completions),
+  { name: "OpenAI Chat Completion", run_type: "llm" }
+);
+```
+
+Note the use of `.bind` to preserve the function's context. The `run_type` field in the extra config object
+marks the function as an LLM call, and enables token usage tracking for OpenAI.
+
+This new method takes exactly the same arguments and has the same return type as the original method,
+but will log everything to LangSmith!
+
+```ts
+await createCompletion({
+  model: "gpt-3.5-turbo",
+  messages: [{ content: "Hi there!", role: "user" }],
+});
+```
+
+```
+{
+  id: 'chatcmpl-8sOWEOYVyehDlyPcBiaDtTxWvr9v6',
+  object: 'chat.completion',
+  created: 1707974654,
+  model: 'gpt-3.5-turbo-0613',
+  choices: [
+    {
+      index: 0,
+      message: { role: 'assistant', content: 'Hello! How can I help you today?' },
+      logprobs: null,
+      finish_reason: 'stop'
+    }
+  ],
+  usage: { prompt_tokens: 10, completion_tokens: 9, total_tokens: 19 },
+  system_fingerprint: null
+}
+```
+
+This also works for streaming:
+
+```ts
+const stream = await createCompletion({
+  model: "gpt-3.5-turbo",
+  stream: true,
+  messages: [{ content: "Hi there!", role: "user" }],
+});
+```
+
+```ts
+for await (const chunk of stream) {
+  console.log(chunk);
+}
+```
+
+Oftentimes, you use the OpenAI client inside other functions or as part of a longer
+sequence. You can automatically get nested traces by using this wrapped method
+within other functions wrapped with `traceable`.
+ +```ts +const nestedTrace = traceable(async (text: string) => { + const completion = await createCompletion({ + model: "gpt-3.5-turbo", + messages: [{ content: text, role: "user" }], + }); + return completion; +}); + +await nestedTrace("Why is the sky blue?"); +``` + +``` +{ + "id": "chatcmpl-8sPToJQLLVepJvyeTfzZMOMVIKjMo", + "object": "chat.completion", + "created": 1707978348, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "The sky appears blue because of a phenomenon known as Rayleigh scattering. The Earth's atmosphere is composed of tiny molecules, such as nitrogen and oxygen, which are much smaller than the wavelength of visible light. When sunlight interacts with these molecules, it gets scattered in all directions. However, shorter wavelengths of light (blue and violet) are scattered more compared to longer wavelengths (red, orange, and yellow). \n\nAs a result, when sunlight passes through the Earth's atmosphere, the blue and violet wavelengths are scattered in all directions, making the sky appear blue. This scattering of shorter wavelengths is also responsible for the vibrant colors observed during sunrise and sunset, when the sunlight has to pass through a thicker portion of the atmosphere, causing the longer wavelengths to dominate the scattered light." + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 13, + "completion_tokens": 154, + "total_tokens": 167 + }, + "system_fingerprint": null +} +``` + +:::tip +[Click here](https://smith.langchain.com/public/4af46ef6-b065-46dc-9cf0-70f1274edb01/r) to see an example LangSmith trace of the above. +::: + +## Next.js + +You can use the `traceable` wrapper function in Next.js apps to wrap arbitrary functions much like in the example above. + +One neat trick you can use for Next.js and other similar server frameworks is to wrap the entire exported handler for a route +to group traces for the any sub-runs. Here's an example: + +```ts +import { NextRequest, NextResponse } from "next/server"; + +import { OpenAI } from "openai"; +import { traceable } from "langsmith/traceable"; + +export const runtime = "edge"; + +const handler = traceable( + async function () { + const openai = new OpenAI(); + const createCompletion = traceable( + openai.chat.completions.create.bind(openai.chat.completions), + { name: "OpenAI Chat Completion", run_type: "llm" } + ); + + const completion = await createCompletion({ + model: "gpt-3.5-turbo", + messages: [{ content: "Why is the sky blue?", role: "user" }], + }); + + const response1 = completion.choices[0].message.content; + + const completion2 = await createCompletion({ + model: "gpt-3.5-turbo", + messages: [ + { content: "Why is the sky blue?", role: "user" }, + { content: response1, role: "assistant" }, + { content: "Cool thank you!", role: "user" }, + ], + }); + + const response2 = completion2.choices[0].message.content; + + return { + text: response2, + }; + }, + { + name: "Simple Next.js handler", + } +); + +export async function POST(req: NextRequest) { + const result = await handler(); + return NextResponse.json(result); +} +``` + +The two OpenAI calls within the handler will be traced with appropriate inputs, outputs, +and token usage information. + +:::tip +[Click here](https://smith.langchain.com/public/faaf26ad-8c59-4622-bcfe-b7d896733ca6/r) to see an example LangSmith trace of the above. 
+::: + +## Vercel AI SDK + +The [Vercel AI SDK](https://sdk.vercel.ai/docs) contains integrations with a variety of model providers. +Here's an example of how you can trace outputs in a Next.js handler: + +```ts +import { traceable } from 'langsmith/traceable'; +import { OpenAIStream, StreamingTextResponse } from 'ai'; + +// Note: There are no types for the Mistral API client yet. +import MistralClient from '@mistralai/mistralai'; + +const client = new MistralClient(process.env.MISTRAL_API_KEY || ''); + +export async function POST(req: Request) { + // Extract the `messages` from the body of the request + const { messages } = await req.json(); + + const mistralChatStream = traceable( + client.chatStream.bind(client), + { + name: "Mistral Stream", + run_type: "llm", + } + ); + + const response = await mistralChatStream({ + model: 'mistral-tiny', + maxTokens: 1000, + messages, + }); + + // Convert the response into a friendly text-stream. The Mistral client responses are + // compatible with the Vercel AI SDK OpenAIStream adapter. + const stream = OpenAIStream(response as any); + + // Respond with the stream + return new StreamingTextResponse(stream); +} +``` + +See the [AI SDK docs](https://sdk.vercel.ai/docs) for more examples. + + +#### Alternatives: **Log traces using a RunTree.** A RunTree tracks your application. Each RunTree object is required to have a name and run_type. These and other important attributes are as follows: @@ -189,7 +429,9 @@ await parentRun.end({ await parentRun.patchRun(); ``` -### Create a Dataset from Existing Runs +## Evaluation + +#### Create a Dataset from Existing Runs Once your runs are stored in LangSmith, you can convert them into a dataset. For this example, we will do so using the Client, but you can also do this using diff --git a/js/package.json b/js/package.json index 4eaeea10f..4c0a193ad 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "langsmith", - "version": "0.0.70", + "version": "0.1.0", "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.", "packageManager": "yarn@1.22.19", "files": [ @@ -131,4 +131,4 @@ }, "./package.json": "./package.json" } -} +} \ No newline at end of file diff --git a/js/src/index.ts b/js/src/index.ts index a34d26520..db32a5549 100644 --- a/js/src/index.ts +++ b/js/src/index.ts @@ -11,4 +11,4 @@ export type { export { RunTree, type RunTreeConfig } from "./run_trees.js"; // Update using yarn bump-version -export const __version__ = "0.0.70"; +export const __version__ = "0.1.0"; diff --git a/js/src/tests/run_trees.int.test.ts b/js/src/tests/run_trees.int.test.ts index 2a0184d3b..09c578afe 100644 --- a/js/src/tests/run_trees.int.test.ts +++ b/js/src/tests/run_trees.int.test.ts @@ -51,7 +51,7 @@ test.concurrent( "Test post and patch run", async () => { const projectName = `__test_run_tree`; - const langchainClient = new Client({}); + const langchainClient = new Client({ timeout_ms: 30000 }); try { await langchainClient.readProject({ projectName }); await langchainClient.deleteProject({ projectName }); diff --git a/python/README.md b/python/README.md index c4fe2ec6f..d0e0b82e1 100644 --- a/python/README.md +++ b/python/README.md @@ -275,6 +275,136 @@ for run in runs: client.evaluate_run(run, evaluator) ``` + +## Integrations + +LangSmith easily integrates with your favorite LLM framework. + +## OpenAI SDK + + +We provide a convenient wrapper for the [OpenAI SDK](https://platform.openai.com/docs/api-reference). 
+
+To use it, you first need to set your LangSmith API key.
+
+```shell
+export LANGCHAIN_API_KEY=
+```
+
+Next, you will need to install the LangSmith SDK:
+
+```shell
+pip install -U langsmith
+```
+
+After that, you can wrap the OpenAI client:
+
+```python
+from openai import OpenAI
+from langsmith import wrappers
+
+client = wrappers.wrap_openai(OpenAI())
+```
+
+Now, you can use the OpenAI client as you normally would, and everything will be logged to LangSmith!
+
+```python
+client.chat.completions.create(
+    model="gpt-4",
+    messages=[{"role": "user", "content": "Say this is a test"}],
+)
+```
+
+Oftentimes, you use the OpenAI client inside other functions.
+You can get nested traces by using this wrapped client and decorating those functions with `@traceable`.
+See [this documentation](https://docs.smith.langchain.com/tracing/faq/logging_and_viewing) for more information on how to use this decorator.
+
+```python
+from langsmith import traceable
+
+@traceable()
+def my_function(text: str):
+    return client.chat.completions.create(
+        model="gpt-4",
+        messages=[{"role": "user", "content": f"Say {text}"}],
+    )
+
+my_function("hello world")
+```
+
+## Instructor
+
+We provide a convenient integration with [Instructor](https://jxnl.github.io/instructor/), largely because it builds directly on the OpenAI SDK.
+
+To use it, you first need to set your LangSmith API key.
+
+```shell
+export LANGCHAIN_API_KEY=
+```
+
+Next, you will need to install the LangSmith SDK:
+
+```shell
+pip install -U langsmith
+```
+
+After that, you can wrap the OpenAI client:
+
+```python
+from openai import OpenAI
+from langsmith import wrappers
+
+client = wrappers.wrap_openai(OpenAI())
+```
+
+After this, you can patch the wrapped client using `instructor`:
+
+```python
+import instructor
+
+client = instructor.patch(client)
+```
+
+Now, you can use `instructor` as you normally would, and everything will be logged to LangSmith!
+
+```python
+from pydantic import BaseModel
+
+
+class UserDetail(BaseModel):
+    name: str
+    age: int
+
+
+user = client.chat.completions.create(
+    model="gpt-3.5-turbo",
+    response_model=UserDetail,
+    messages=[
+        {"role": "user", "content": "Extract Jason is 25 years old"},
+    ]
+)
+```
+
+Oftentimes, you use `instructor` inside other functions.
+You can get nested traces by using this wrapped client and decorating those functions with `@traceable`.
+See [this documentation](https://docs.smith.langchain.com/tracing/faq/logging_and_viewing) for more information on how to use this decorator.
+
+```python
+@traceable()
+def my_function(text: str) -> UserDetail:
+    return client.chat.completions.create(
+        model="gpt-3.5-turbo",
+        response_model=UserDetail,
+        messages=[
+            {"role": "user", "content": f"Extract {text}"},
+        ]
+    )
+
+
+my_function("Jason is 25 years old")
+```
+
+
 ## Additional Documentation
 
 To learn more about the LangSmith platform, check out the [docs](https://docs.smith.langchain.com/docs/).
diff --git a/python/poetry.lock b/python/poetry.lock
index fd5b73d79..adc86d524 100644
--- a/python/poetry.lock
+++ b/python/poetry.lock
@@ -1,115 +1,5 @@
 # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
-[[package]] -name = "aiohttp" -version = "3.9.2" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:772fbe371788e61c58d6d3d904268e48a594ba866804d08c995ad71b144f94cb"}, - {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:edd4f1af2253f227ae311ab3d403d0c506c9b4410c7fc8d9573dec6d9740369f"}, - {file = "aiohttp-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cfee9287778399fdef6f8a11c9e425e1cb13cc9920fd3a3df8f122500978292b"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc158466f6a980a6095ee55174d1de5730ad7dec251be655d9a6a9dd7ea1ff9"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54ec82f45d57c9a65a1ead3953b51c704f9587440e6682f689da97f3e8defa35"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abeb813a18eb387f0d835ef51f88568540ad0325807a77a6e501fed4610f864e"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc91d07280d7d169f3a0f9179d8babd0ee05c79d4d891447629ff0d7d8089ec2"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b65e861f4bebfb660f7f0f40fa3eb9f2ab9af10647d05dac824390e7af8f75b7"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04fd8ffd2be73d42bcf55fd78cde7958eeee6d4d8f73c3846b7cba491ecdb570"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3d8d962b439a859b3ded9a1e111a4615357b01620a546bc601f25b0211f2da81"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:8ceb658afd12b27552597cf9a65d9807d58aef45adbb58616cdd5ad4c258c39e"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0e4ee4df741670560b1bc393672035418bf9063718fee05e1796bf867e995fad"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2dec87a556f300d3211decf018bfd263424f0690fcca00de94a837949fbcea02"}, - {file = "aiohttp-3.9.2-cp310-cp310-win32.whl", hash = "sha256:3e1a800f988ce7c4917f34096f81585a73dbf65b5c39618b37926b1238cf9bc4"}, - {file = "aiohttp-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:ea510718a41b95c236c992b89fdfc3d04cc7ca60281f93aaada497c2b4e05c46"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6aaa6f99256dd1b5756a50891a20f0d252bd7bdb0854c5d440edab4495c9f973"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a27d8c70ad87bcfce2e97488652075a9bdd5b70093f50b10ae051dfe5e6baf37"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:54287bcb74d21715ac8382e9de146d9442b5f133d9babb7e5d9e453faadd005e"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb3d05569aa83011fcb346b5266e00b04180105fcacc63743fc2e4a1862a891"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8534e7d69bb8e8d134fe2be9890d1b863518582f30c9874ed7ed12e48abe3c4"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd9d5b989d57b41e4ff56ab250c5ddf259f32db17159cce630fd543376bd96b"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fa6904088e6642609981f919ba775838ebf7df7fe64998b1a954fb411ffb4663"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda42eb410be91b349fb4ee3a23a30ee301c391e503996a638d05659d76ea4c2"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:193cc1ccd69d819562cc7f345c815a6fc51d223b2ef22f23c1a0f67a88de9a72"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b9f1cb839b621f84a5b006848e336cf1496688059d2408e617af33e3470ba204"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d22a0931848b8c7a023c695fa2057c6aaac19085f257d48baa24455e67df97ec"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4112d8ba61fbd0abd5d43a9cb312214565b446d926e282a6d7da3f5a5aa71d36"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c4ad4241b52bb2eb7a4d2bde060d31c2b255b8c6597dd8deac2f039168d14fd7"}, - {file = "aiohttp-3.9.2-cp311-cp311-win32.whl", hash = "sha256:ee2661a3f5b529f4fc8a8ffee9f736ae054adfb353a0d2f78218be90617194b3"}, - {file = "aiohttp-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:4deae2c165a5db1ed97df2868ef31ca3cc999988812e82386d22937d9d6fed52"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6f4cdba12539215aaecf3c310ce9d067b0081a0795dd8a8805fdb67a65c0572a"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:84e843b33d5460a5c501c05539809ff3aee07436296ff9fbc4d327e32aa3a326"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8008d0f451d66140a5aa1c17e3eedc9d56e14207568cd42072c9d6b92bf19b52"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61c47ab8ef629793c086378b1df93d18438612d3ed60dca76c3422f4fbafa792"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc71f748e12284312f140eaa6599a520389273174b42c345d13c7e07792f4f57"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1c3a4d0ab2f75f22ec80bca62385db2e8810ee12efa8c9e92efea45c1849133"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a87aa0b13bbee025faa59fa58861303c2b064b9855d4c0e45ec70182bbeba1b"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2cc0d04688b9f4a7854c56c18aa7af9e5b0a87a28f934e2e596ba7e14783192"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1956e3ac376b1711c1533266dec4efd485f821d84c13ce1217d53e42c9e65f08"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:114da29f39eccd71b93a0fcacff178749a5c3559009b4a4498c2c173a6d74dff"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3f17999ae3927d8a9a823a1283b201344a0627272f92d4f3e3a4efe276972fe8"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f31df6a32217a34ae2f813b152a6f348154f948c83213b690e59d9e84020925c"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7a75307ffe31329928a8d47eae0692192327c599113d41b278d4c12b54e1bd11"}, - {file = "aiohttp-3.9.2-cp312-cp312-win32.whl", hash = "sha256:972b63d589ff8f305463593050a31b5ce91638918da38139b9d8deaba9e0fed7"}, - {file = "aiohttp-3.9.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:200dc0246f0cb5405c80d18ac905c8350179c063ea1587580e3335bfc243ba6a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:158564d0d1020e0d3fe919a81d97aadad35171e13e7b425b244ad4337fc6793a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da1346cd0ccb395f0ed16b113ebb626fa43b7b07fd7344fce33e7a4f04a8897a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eaa9256de26ea0334ffa25f1913ae15a51e35c529a1ed9af8e6286dd44312554"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1543e7fb00214fb4ccead42e6a7d86f3bb7c34751ec7c605cca7388e525fd0b4"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:186e94570433a004e05f31f632726ae0f2c9dee4762a9ce915769ce9c0a23d89"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d52d20832ac1560f4510d68e7ba8befbc801a2b77df12bd0cd2bcf3b049e52a4"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c45e4e815ac6af3b72ca2bde9b608d2571737bb1e2d42299fc1ffdf60f6f9a1"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa906b9bdfd4a7972dd0628dbbd6413d2062df5b431194486a78f0d2ae87bd55"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:68bbee9e17d66f17bb0010aa15a22c6eb28583edcc8b3212e2b8e3f77f3ebe2a"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c189b64bd6d9a403a1a3f86a3ab3acbc3dc41a68f73a268a4f683f89a4dec1f"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8a7876f794523123bca6d44bfecd89c9fec9ec897a25f3dd202ee7fc5c6525b7"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d23fba734e3dd7b1d679b9473129cd52e4ec0e65a4512b488981a56420e708db"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b141753be581fab842a25cb319f79536d19c2a51995d7d8b29ee290169868eab"}, - {file = "aiohttp-3.9.2-cp38-cp38-win32.whl", hash = "sha256:103daf41ff3b53ba6fa09ad410793e2e76c9d0269151812e5aba4b9dd674a7e8"}, - {file = "aiohttp-3.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:328918a6c2835861ff7afa8c6d2c70c35fdaf996205d5932351bdd952f33fa2f"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5264d7327c9464786f74e4ec9342afbbb6ee70dfbb2ec9e3dfce7a54c8043aa3"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07205ae0015e05c78b3288c1517afa000823a678a41594b3fdc870878d645305"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0a1e638cffc3ec4d4784b8b4fd1cf28968febc4bd2718ffa25b99b96a741bd"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d43302a30ba1166325974858e6ef31727a23bdd12db40e725bec0f759abce505"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16a967685907003765855999af11a79b24e70b34dc710f77a38d21cd9fc4f5fe"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fa3ee92cd441d5c2d07ca88d7a9cef50f7ec975f0117cd0c62018022a184308"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b500c5ad9c07639d48615a770f49618130e61be36608fc9bc2d9bae31732b8f"}, - {file = 
"aiohttp-3.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c07327b368745b1ce2393ae9e1aafed7073d9199e1dcba14e035cc646c7941bf"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc7d6502c23a0ec109687bf31909b3fb7b196faf198f8cff68c81b49eb316ea9"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:07be2be7071723c3509ab5c08108d3a74f2181d4964e869f2504aaab68f8d3e8"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:122468f6fee5fcbe67cb07014a08c195b3d4c41ff71e7b5160a7bcc41d585a5f"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:00a9abcea793c81e7f8778ca195a1714a64f6d7436c4c0bb168ad2a212627000"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a9825fdd64ecac5c670234d80bb52bdcaa4139d1f839165f548208b3779c6c6"}, - {file = "aiohttp-3.9.2-cp39-cp39-win32.whl", hash = "sha256:5422cd9a4a00f24c7244e1b15aa9b87935c85fb6a00c8ac9b2527b38627a9211"}, - {file = "aiohttp-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:7d579dcd5d82a86a46f725458418458fa43686f6a7b252f2966d359033ffc8ab"}, - {file = "aiohttp-3.9.2.tar.gz", hash = "sha256:b0ad0a5e86ce73f5368a164c10ada10504bf91869c05ab75d982c6048217fbf7"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - [[package]] name = "anyio" version = "3.7.1" @@ -131,17 +21,6 @@ doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd- test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (<0.22)"] -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - [[package]] name = "attrs" version = "23.1.0" @@ -406,13 +285,13 @@ toml = ["tomli"] [[package]] name = "dataclasses-json" -version = "0.6.1" +version = "0.6.4" description = "Easily serialize dataclasses to and from JSON." 
optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "dataclasses_json-0.6.1-py3-none-any.whl", hash = "sha256:1bd8418a61fe3d588bb0079214d7fb71d44937da40742b787256fd53b26b6c80"}, - {file = "dataclasses_json-0.6.1.tar.gz", hash = "sha256:a53c220c35134ce08211a1057fd0e5bf76dc5331627c6b241cacbc570a89faae"}, + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, ] [package.dependencies] @@ -472,146 +351,6 @@ files = [ [package.dependencies] python-dateutil = ">=2.7" -[[package]] -name = "frozenlist" -version = "1.4.0" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, - {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, - {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, - {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, - {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, - {file = 
"frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, - {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, - {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, - {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, - {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, - {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, -] - -[[package]] -name = "greenlet" -version = "3.0.1" 
-description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, - {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, - {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, - {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, - {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, - {file = 
"greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, - {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, - {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, - {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, - {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, - {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, - {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, - {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, - {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, - {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, - {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, -] - -[package.extras] -docs = ["Sphinx"] -test = ["objgraph", "psutil"] - [[package]] name = "h11" version = "0.14.0" @@ -690,173 +429,26 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "jsonpatch" -version = "1.33" -description = "Apply JSON-Patches (RFC 6902)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] - -[package.dependencies] -jsonpointer = ">=1.9" - -[[package]] -name = "jsonpointer" -version = "2.4" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, -] - -[[package]] -name = "langchain" -version = "0.0.333" -description = "Building applications with LLMs through composability" -optional = false -python-versions = ">=3.8.1,<4.0" -files = [ - {file = "langchain-0.0.333-py3-none-any.whl", hash = "sha256:7ee619bbdccfe15bcc4e255a30b5f2e75f9d230cdbaf572f4063dc59d4b03af6"}, - {file = "langchain-0.0.333.tar.gz", hash = 
"sha256:64e00ecee3dd316a97f825429e286a1b207315a0cc753bcc1cd5cfcb92abbc39"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -anyio = "<4.0" -async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -dataclasses-json = ">=0.5.7,<0.7" -jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.0.62,<0.1.0" -numpy = ">=1,<2" -pydantic = ">=1,<3" -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<9.0.0" - -[package.extras] -all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.8.3,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.10.1,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<4)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.6.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"] -azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (>=0,<1)"] -clarifai = ["clarifai (>=9.1.0)"] -cli = ["typer (>=0.9.0,<0.10.0)"] -cohere = ["cohere (>=4,<5)"] -docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] -embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "dashvector (>=1.0.1,<2.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", 
"feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.6.0,<0.7.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (>=0,<1)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] -javascript = ["esprima (>=4.0.1,<5.0.0)"] -llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] -openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.6.0)"] -qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] -text-helpers = ["chardet (>=5.1.0,<6.0.0)"] - [[package]] name = "marshmallow" -version = "3.20.1" +version = "3.20.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false
python-versions = ">=3.8"
files = [
- {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"},
- {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"},
+ {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"},
+ {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"},
]
[package.dependencies]
packaging = ">=17.0"
[package.extras]
-dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"]
+dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
+docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
+lint = ["pre-commit (>=2.4,<4.0)"]
tests = ["pytest", "pytz", "simplejson"]
-[[package]]
-name = "multidict"
-version = "6.0.4"
-description = "multidict implementation"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
- {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
- {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
- {file = 
"multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = 
"multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, -] - [[package]] name = "mypy" version = "1.6.1" @@ -1243,55 +835,6 @@ files = [ [package.dependencies] six = ">=1.5" -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - [[package]] name = "requests" version = "2.31.0" @@ -1361,107 +904,6 @@ files = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] -[[package]] -name = "sqlalchemy" -version = "2.0.23" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, - {file = 
"SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, - {file = 
"SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, - {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, - {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.2.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = 
["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] - -[[package]] -name = "tenacity" -version = "8.2.3" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, - {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, -] - -[package.extras] -doc = ["reno", "sphinx", "tornado (>=4.5)"] - [[package]] name = "tomli" version = "2.0.1" @@ -1622,94 +1064,7 @@ files = [ [package.extras] watchmedo = ["PyYAML (>=3.10)"] -[[package]] -name = "yarl" -version = "1.9.2" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash 
= "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = 
"yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "54bcb7f0ce64b9375ca8b3a70932f3e4ae020e2e0e021e7d6fede195186de4a2" +content-hash = "603d245853deae3be2a4791bde669d0e7c12e3fb8f0f4bf396b8fca0a7234af1" diff --git a/python/pyproject.toml b/python/pyproject.toml index 1bd0cf6cf..5a0e9d7b0 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langsmith" -version = "0.0.92" +version = "0.1.0" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." authors = ["LangChain "] license = "MIT" @@ -40,7 +40,6 @@ types-requests = "^2.31.0.1" pandas-stubs = "^2.0.1.230501" types-pyyaml = "^6.0.12.10" pytest-asyncio = "^0.21.0" -langchain = "^0.0.333" types-psutil = "^5.9.5.16" psutil = "^5.9.5" freezegun = "^1.2.2" @@ -48,6 +47,7 @@ pytest-subtests = "^0.11.0" pytest-watcher = "^0.3.4" pytest-xdist = "^3.5.0" pytest-cov = "^4.1.0" +dataclasses-json = "^0.6.4" [tool.poetry.group.lint.dependencies] openai = "^1.10"